diff --git a/.github/workflows/backend-docker-publish.yml b/.github/workflows/backend-docker-publish.yml new file mode 100644 index 00000000..88c07be0 --- /dev/null +++ b/.github/workflows/backend-docker-publish.yml @@ -0,0 +1,87 @@ +name: Publish Backend Multi-Architecture Image to DockerHub +on: + push: + branches: + - main + paths: + - 'Backend/**' + workflow_dispatch: +env: + REGISTRY: docker.io + IMAGE_NAME: madeofpendletonwool/pinepods_backend +jobs: + build-and-push-x86: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + id-token: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_KEY }} + - name: Build and push x86 image + run: | + cd Backend + docker build --platform linux/amd64 -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 -f dockerfile . + docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 + + build-and-push-arm64: + runs-on: ubuntu-24.04-arm + permissions: + contents: read + packages: write + id-token: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_KEY }} + - name: Build and push ARM64 image + run: | + cd Backend + docker build --platform linux/arm64 -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 -f dockerfile . 
+ docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 + + create-manifests: + needs: [build-and-push-x86, build-and-push-arm64] + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + id-token: write + steps: + - name: Checkout code + uses: actions/checkout@v4 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Log in to Docker Hub + uses: docker/login-action@v2 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_KEY }} + + - name: Create and push Docker manifest for the latest tag + run: | + sleep 10 + # Pull the images first to ensure they're available + docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 + docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 + + # Create and push manifest + docker manifest create ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \ + --amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 \ + --amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 + + docker manifest push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest diff --git a/.github/workflows/backwards-compatibility.yml b/.github/workflows/backwards-compatibility.yml new file mode 100644 index 00000000..63755223 --- /dev/null +++ b/.github/workflows/backwards-compatibility.yml @@ -0,0 +1,406 @@ +name: Database Backwards Compatibility Test + +on: + push: + branches: [main] + pull_request: + branches: [main] + +env: + TEST_DB_PASSWORD: "test_password_123!" + TEST_DB_NAME: "pinepods_test_db" + +jobs: + test-mysql-compatibility: + runs-on: ubuntu-latest + services: + mysql: + image: mysql:latest + env: + MYSQL_ROOT_PASSWORD: test_password_123! 
+ MYSQL_DATABASE: pinepods_test_db + ports: + - 3306:3306 + options: >- + --health-cmd="mysqladmin ping" + --health-interval=10s + --health-timeout=5s + --health-retries=3 + + valkey: + image: valkey/valkey:8-alpine + ports: + - 6379:6379 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Get previous release tag + id: get_previous_tag + run: | + # Get the latest stable release (exclude rc, alpha, beta) + PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1) + + if [ -z "$PREVIOUS_TAG" ]; then + echo "No stable release tag found, using 0.7.9 as baseline" + PREVIOUS_TAG="0.7.9" + fi + + echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT + echo "Using previous tag: $PREVIOUS_TAG" + + - name: Start previous PinePods version + run: | + echo "🚀 Starting PinePods ${{ steps.get_previous_tag.outputs.previous_tag }}" + + # Create docker-compose for previous version + cat > docker-compose.previous.yml << EOF + version: '3.8' + services: + pinepods_previous: + image: madeofpendletonwool/pinepods:${{ steps.get_previous_tag.outputs.previous_tag }} + environment: + DB_TYPE: mysql + DB_HOST: mysql + DB_PORT: 3306 + DB_USER: root + DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + DB_NAME: ${{ env.TEST_DB_NAME }} + VALKEY_HOST: valkey + VALKEY_PORT: 6379 + HOSTNAME: 'http://localhost:8040' + DEBUG_MODE: true + SEARCH_API_URL: 'https://search.pinepods.online/api/search' + PEOPLE_API_URL: 'https://people.pinepods.online' + ports: + - "8040:8040" + depends_on: + - mysql + - valkey + networks: + - test_network + + mysql: + image: mysql:8.0 + environment: + MYSQL_ROOT_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + MYSQL_DATABASE: ${{ env.TEST_DB_NAME }} + networks: + - test_network + + valkey: + image: valkey/valkey:8-alpine + networks: + - test_network + + networks: + test_network: + driver: bridge + EOF + + # Start previous version and wait for it to be ready + docker compose -f docker-compose.previous.yml 
up -d + + # Wait for services to be ready + echo "⏳ Waiting for previous version to initialize..." + sleep 30 + + # Check if previous version is responding + timeout 60 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do sleep 5; done' + echo "✅ Previous version (${{ steps.get_previous_tag.outputs.previous_tag }}) is ready" + + - name: Stop previous version + run: | + echo "🛑 Stopping previous PinePods version" + docker compose -f docker-compose.previous.yml stop pinepods_previous + echo "✅ Previous version stopped (database preserved)" + + + - name: Build current version + run: | + echo "🔨 Building current PinePods version from source" + docker build -f dockerfile -t pinepods-current:test . + echo "✅ Build complete" + + - name: Start current version + run: | + + # Create docker-compose for current version + cat > docker-compose.current.yml << EOF + version: '3.8' + services: + pinepods_current: + image: pinepods-current:test + environment: + DB_TYPE: mysql + DB_HOST: mysql + DB_PORT: 3306 + DB_USER: root + DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + DB_NAME: ${{ env.TEST_DB_NAME }} + VALKEY_HOST: valkey + VALKEY_PORT: 6379 + HOSTNAME: 'http://localhost:8040' + DEBUG_MODE: true + SEARCH_API_URL: 'https://search.pinepods.online/api/search' + PEOPLE_API_URL: 'https://people.pinepods.online' + ports: + - "8040:8040" + depends_on: + - mysql + - valkey + networks: + - test_network + + mysql: + image: mysql:8.0 + environment: + MYSQL_ROOT_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + MYSQL_DATABASE: ${{ env.TEST_DB_NAME }} + networks: + - test_network + + valkey: + image: valkey/valkey:8-alpine + networks: + - test_network + + networks: + test_network: + driver: bridge + EOF + + echo "🚀 Starting current PinePods version" + # Start current version + docker compose -f docker-compose.current.yml up -d pinepods_current + + # Wait for current version to be ready + echo "⏳ Waiting for current version to initialize..." 
+ sleep 60 + + # Check if current version is responding + timeout 120 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do echo "Waiting for current version..."; sleep 10; done' + echo "✅ Current version is ready" + + - name: Build validator and validate upgraded database + run: | + echo "🔨 Building database validator" + docker build -f Dockerfile.validator -t pinepods-validator . + + echo "🔍 Validating upgraded database schema" + docker run --rm --network pinepods_test_network \ + -e DB_TYPE=mysql \ + -e DB_HOST=mysql \ + -e DB_PORT=3306 \ + -e DB_USER=root \ + -e DB_PASSWORD=${{ env.TEST_DB_PASSWORD }} \ + -e DB_NAME=${{ env.TEST_DB_NAME }} \ + pinepods-validator + + - name: Test basic functionality + run: | + echo "🧪 Testing basic API functionality" + + # Test health endpoint + curl -f http://localhost:8040/api/health || exit 1 + + # Test pinepods check endpoint + curl -f http://localhost:8040/api/pinepods_check || exit 1 + + echo "✅ Basic functionality tests passed" + + - name: Cleanup + if: always() + run: | + echo "🧹 Cleaning up test environment" + docker compose -f docker-compose.previous.yml down -v || true + docker compose -f docker-compose.current.yml down -v || true + + test-postgresql-compatibility: + runs-on: ubuntu-latest + services: + postgres: + image: postgres:15 + env: + POSTGRES_PASSWORD: test_password_123! 
+ POSTGRES_DB: pinepods_test_db + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + valkey: + image: valkey/valkey:8-alpine + ports: + - 6379:6379 + + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Get previous release tag + id: get_previous_tag + run: | + # Get the latest stable release (exclude rc, alpha, beta) + PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1) + + if [ -z "$PREVIOUS_TAG" ]; then + echo "No stable release tag found, using 0.7.9 as baseline" + PREVIOUS_TAG="0.7.9" + fi + + echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT + echo "Using previous tag: $PREVIOUS_TAG" + + - name: Start previous PinePods version + run: | + echo "🚀 Starting PinePods ${{ steps.get_previous_tag.outputs.previous_tag }} (PostgreSQL)" + + cat > docker-compose.postgres-previous.yml << EOF + version: '3.8' + services: + pinepods_previous: + image: madeofpendletonwool/pinepods:${{ steps.get_previous_tag.outputs.previous_tag }} + environment: + DB_TYPE: postgresql + DB_HOST: postgres + DB_PORT: 5432 + DB_USER: postgres + DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + DB_NAME: ${{ env.TEST_DB_NAME }} + VALKEY_HOST: valkey + VALKEY_PORT: 6379 + HOSTNAME: 'http://localhost:8040' + DEBUG_MODE: true + SEARCH_API_URL: 'https://search.pinepods.online/api/search' + PEOPLE_API_URL: 'https://people.pinepods.online' + ports: + - "8040:8040" + depends_on: + - postgres + - valkey + networks: + - test_network + + postgres: + image: postgres:latest + environment: + POSTGRES_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + POSTGRES_DB: ${{ env.TEST_DB_NAME }} + networks: + - test_network + + valkey: + image: valkey/valkey:8-alpine + networks: + - test_network + + networks: + test_network: + driver: bridge + EOF + + docker compose -f docker-compose.postgres-previous.yml up -d + sleep 30 + timeout 60 bash -c 'while ! 
curl -f http://localhost:8040/api/pinepods_check; do sleep 5; done' + + - name: Stop previous version + run: | + echo "🛑 Stopping previous PinePods version" + docker compose -f docker-compose.postgres-previous.yml stop pinepods_previous + echo "✅ Previous version stopped (database preserved)" + + - name: Build current version (PostgreSQL) + run: | + echo "🔨 Building current PinePods version from source" + docker build -f dockerfile -t pinepods-current:test . + echo "✅ Build complete" + + - name: Test current version (PostgreSQL) + run: | + echo "🚀 Starting current PinePods version with PostgreSQL" + + # Create docker-compose for current version + cat > docker-compose.postgres-current.yml << EOF + version: '3.8' + services: + pinepods_current: + image: pinepods-current:test + environment: + DB_TYPE: postgresql + DB_HOST: postgres + DB_PORT: 5432 + DB_USER: postgres + DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + DB_NAME: ${{ env.TEST_DB_NAME }} + VALKEY_HOST: valkey + VALKEY_PORT: 6379 + HOSTNAME: 'http://localhost:8040' + DEBUG_MODE: true + SEARCH_API_URL: 'https://search.pinepods.online/api/search' + PEOPLE_API_URL: 'https://people.pinepods.online' + ports: + - "8040:8040" + depends_on: + - postgres + - valkey + networks: + - test_network + + postgres: + image: postgres:latest + environment: + POSTGRES_PASSWORD: ${{ env.TEST_DB_PASSWORD }} + POSTGRES_DB: ${{ env.TEST_DB_NAME }} + networks: + - test_network + + valkey: + image: valkey/valkey:8-alpine + networks: + - test_network + + networks: + test_network: + driver: bridge + EOF + + # Start current version + docker compose -f docker-compose.postgres-current.yml up -d pinepods_current + + # Wait for current version to be ready + echo "⏳ Waiting for current version to initialize..." + sleep 60 + + # Check if current version is responding + timeout 120 bash -c 'while ! 
curl -f http://localhost:8040/api/pinepods_check; do echo "Waiting for current version..."; sleep 10; done' + echo "✅ Current version is ready" + + - name: Build validator and validate upgraded database (PostgreSQL) + run: | + echo "🔨 Building PostgreSQL database validator" + docker build -f Dockerfile.validator.postgres -t pinepods-validator-postgres . + + echo "🔍 Validating upgraded database schema" + docker run --rm --network pinepods_test_network \ + -e DB_TYPE=postgresql \ + -e DB_HOST=postgres \ + -e DB_PORT=5432 \ + -e DB_USER=postgres \ + -e DB_PASSWORD=${{ env.TEST_DB_PASSWORD }} \ + -e DB_NAME=${{ env.TEST_DB_NAME }} \ + pinepods-validator-postgres + + - name: Cleanup + if: always() + run: | + docker compose -f docker-compose.postgres-previous.yml down -v || true + docker compose -f docker-compose.postgres-current.yml down -v || true diff --git a/.github/workflows/build-android-app.yml b/.github/workflows/build-android-app.yml deleted file mode 100644 index 57734a52..00000000 --- a/.github/workflows/build-android-app.yml +++ /dev/null @@ -1,155 +0,0 @@ -name: Build Android Pinepods App (Legacy Tauri - Deprecated) - -on: - # This workflow is deprecated in favor of build-android-flutter.yml - # release: - # types: [published] - workflow_dispatch: - inputs: - version: - description: "Manual override version tag (optional)" - required: false - -jobs: - build: - name: Build Android Release - runs-on: ubuntu-latest - - steps: - - name: Set Image Tag (Unix) - run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV - - - name: Setup | Checkout - uses: actions/checkout@v3 - - - name: Set up JDK 17 - uses: actions/setup-java@v3 - with: - java-version: "17" - distribution: "temurin" - - - name: Setup Android SDK - uses: android-actions/setup-android@v3 - - - uses: nttld/setup-ndk@v1 - id: setup-ndk - with: - ndk-version: r27b - add-to-path: false - - - uses: hecrj/setup-rust-action@v2 - with: - 
rust-version: 1.86 - targets: wasm32-unknown-unknown - - - name: Install cargo-binstall - uses: cargo-bins/cargo-binstall@main - - - name: Depends install - if: ${{ env.DEPENDS_SETUP == 'true' }} - run: | - sudo apt update - sudo apt install -qy libgtk-3-dev - sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf - - - name: wasm-addition - run: | - rustup target add wasm32-unknown-unknown - rustup target add aarch64-linux-android - - - name: Install Trunk - run: | - cargo binstall trunk -y - - - name: Install Tauri - run: | - cargo install tauri-cli@2.0.0-rc.16 --locked - - - name: Update Tauri version (UNIX) - run: | - cd web/src-tauri - # Use different sed syntax for macOS - if [[ "$OSTYPE" == "darwin"* ]]; then - sed -i '' "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json - else - sed -i "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json - fi - cat tauri.conf.json - shell: bash - - - name: setup Android signing - run: | - echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" > web/src-tauri/gen/android/keystore.properties - echo "password=${{ secrets.ANDROID_KEY_PW }}" >> web/src-tauri/gen/android/keystore.properties - base64 -d <<< "${{ secrets.ANDROID_KEY_BASE64 }}" > $RUNNER_TEMP/keystore.jks - echo "storeFile=$RUNNER_TEMP/keystore.jks" >> web/src-tauri/gen/android/keystore.properties - - - name: Setup Android SDK - uses: android-actions/setup-android@v3 - - - name: setup Android signing - run: | - cd web/src-tauri/gen/android - echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" > keystore.properties - echo "password=${{ secrets.ANDROID_KEY_PW }}" >> keystore.properties - base64 -d <<< "${{ secrets.ANDROID_KEY_BASE64 }}" > $RUNNER_TEMP/keystore.jks - echo "storeFile=$RUNNER_TEMP/keystore.jks" >> keystore.properties - - - name: Build | Compile (UNIX) - run: | - export ANDROID_HOME=$HOME/.android/sdk - export NDK_HOME=$NDK_JOB_HOME - export 
RUSTFLAGS="--cfg=web_sys_unstable_apis" # Add this line - cd web/src-tauri - cargo tauri icon icons/Square512x512.png - cat tauri.conf.json - cargo tauri android init - cargo tauri android build --apk - cargo tauri android build --aab - shell: bash - env: - NDK_JOB_HOME: ${{ steps.setup-ndk.outputs.ndk-path }} - - - name: Archive build 1 (apk) - uses: actions/upload-artifact@v4 - with: - name: apk-build - path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk - - - name: Archive build 2 (aab) - uses: actions/upload-artifact@v4 - with: - name: aab-build - path: ./web/src-tauri/gen/android/app/build/outputs/bundle/universalRelease/app-universal-release.aab - - # - name: Archive build 2 (aab) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Archive build 2 (Ubuntu) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/target/release/bundle/appimage/${{ env.ARTIFACT_NAME2 }} - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Archive build 3 (Ubuntu) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/target/release/bundle/rpm/${{ env.ARTIFACT_NAME3 }} - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Upload release asset (Ubuntu - DEB) - # if: github.event_name == 'release' && matrix.os == 'ubuntu-latest' - # uses: actions/upload-release-asset@v1 - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # with: - # upload_url: ${{ github.event.release.upload_url }} - # asset_path: ./web/src-tauri/target/release/bundle/deb/${{ env.ARTIFACT_NAME1 }} - # asset_name: ${{ env.ARTIFACT_NAME1 }} - # asset_content_type: application/vnd.debian.binary-package diff --git a/.github/workflows/build-android-archive.yml 
b/.github/workflows/build-android-archive.yml deleted file mode 100644 index 7e5815fb..00000000 --- a/.github/workflows/build-android-archive.yml +++ /dev/null @@ -1,172 +0,0 @@ -permissions: - contents: read -name: Build Android Release Archive - -on: - workflow_dispatch: - inputs: - version: - description: "Version tag (e.g., v0.7.9)" - required: false - default: "manual-build" - -jobs: - build-android: - name: Build Android Release Archive - runs-on: ubuntu-latest - - steps: - - name: Set Build Info - run: | - echo "VERSION_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV - echo "BUILD_NUMBER=$(date +%s)" >> $GITHUB_ENV - echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_ENV - - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up JDK 17 - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - - name: Setup Android SDK - uses: android-actions/setup-android@v3 - - - uses: nttld/setup-ndk@v1 - id: setup-ndk - with: - ndk-version: r26d - link-to-sdk: true - - - name: Setup Flutter - uses: subosito/flutter-action@v2 - with: - flutter-version: "3.32.0" - channel: "stable" - - - name: Install dependencies - run: | - cd mobile - flutter pub get - - - name: Setup Android signing - run: | - cd mobile/android - echo "storePassword=${{ secrets.ANDROID_STORE_PASSWORD }}" > key.properties - echo "keyPassword=${{ secrets.ANDROID_KEY_PASSWORD }}" >> key.properties - echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" >> key.properties - echo "storeFile=../upload-keystore.jks" >> key.properties - echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > upload-keystore.jks - - - name: Update app version - run: | - cd mobile - if [[ "$VERSION_TAG" != "manual-build" && "$VERSION_TAG" != "" ]]; then - # Remove 'v' prefix if present - CLEAN_VERSION=${VERSION_TAG#v} - sed -i "s/^version: .*/version: $CLEAN_VERSION/" pubspec.yaml - echo "Updated version to: $CLEAN_VERSION" - fi - - - name: Build signed APK 
(split per ABI) - run: | - cd mobile - flutter build apk --release --split-per-abi - echo "Split APK build completed" - - - name: Build signed APK (universal) - run: | - cd mobile - flutter build apk --release - echo "Universal APK build completed" - - - name: Build signed AAB (App Bundle) - run: | - cd mobile - flutter build appbundle --release - echo "AAB build completed" - - - name: Prepare release artifacts - run: | - cd mobile - mkdir -p ../release-artifacts/android - - # Copy split APKs with descriptive names - cp build/app/outputs/flutter-apk/app-arm64-v8a-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-arm64-v8a.apk - cp build/app/outputs/flutter-apk/app-armeabi-v7a-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-armeabi-v7a.apk - cp build/app/outputs/flutter-apk/app-x86_64-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-x86_64.apk - - # Copy universal APK - cp build/app/outputs/flutter-apk/app-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-universal.apk - - # Copy AAB for Play Store - cp build/app/outputs/bundle/release/app-release.aab ../release-artifacts/android/PinePods-${VERSION_TAG}-playstore.aab - - # Create build info file - cat > ../release-artifacts/android/BUILD_INFO.txt << EOF - PinePods Mobile - Android Release Build - ===================================== - Version: $VERSION_TAG - Build Date: $BUILD_DATE - Build Number: $BUILD_NUMBER - Flutter Version: 3.32.0 - Package: com.gooseberrydevelopment.pinepods - - 📱 APK FILES: - ============= - PinePods-${VERSION_TAG}-arm64-v8a.apk - 64-bit ARM (most modern phones) - PinePods-${VERSION_TAG}-armeabi-v7a.apk - 32-bit ARM (older phones) - PinePods-${VERSION_TAG}-x86_64.apk - 64-bit Intel (emulators/some tablets) - PinePods-${VERSION_TAG}-universal.apk - Works on all devices (larger size) - - 📦 STORE FILES: - ============== - PinePods-${VERSION_TAG}-playstore.aab - Google Play Store upload (.aab format) - - 🚀 DISTRIBUTION: - 
================ - • Google Play Store: Use the .aab file - • F-Droid: They build from source (no APK needed) - • IzzyOnDroid: Use universal.apk or arm64-v8a.apk - • Direct install: Use arm64-v8a.apk for most users - - ⚡ QUICK INSTALL: - ================ - Most users should download: PinePods-${VERSION_TAG}-arm64-v8a.apk - EOF - - - name: Upload Android Release Artifacts - uses: actions/upload-artifact@v4 - with: - name: android-release-${{ env.VERSION_TAG }} - path: release-artifacts/android/ - retention-days: 90 - - - name: Create Release Summary - run: | - echo "# 🤖 Android Release Build Complete" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Version:** ${{ github.event.inputs.version }}" >> $GITHUB_STEP_SUMMARY - echo "**Build Date:** $(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - echo "## 📦 Generated Files" >> $GITHUB_STEP_SUMMARY - echo "- 🎯 **arm64-v8a.apk** - Recommended for most users" >> $GITHUB_STEP_SUMMARY - echo "- 📱 **armeabi-v7a.apk** - For older Android devices" >> $GITHUB_STEP_SUMMARY - echo "- 💻 **x86_64.apk** - For emulators and Intel devices" >> $GITHUB_STEP_SUMMARY - echo "- 🌍 **universal.apk** - Works on all devices (larger file)" >> $GITHUB_STEP_SUMMARY - echo "- 🏪 **playstore.aab** - For Google Play Store upload" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "## 📥 Download Instructions" >> $GITHUB_STEP_SUMMARY - echo "1. Go to **Actions** tab → This workflow run" >> $GITHUB_STEP_SUMMARY - echo "2. Scroll down to **Artifacts** section" >> $GITHUB_STEP_SUMMARY - echo "3. Download \`android-release-${{ github.event.inputs.version }}\`" >> $GITHUB_STEP_SUMMARY - echo "4. 
Extract the ZIP file" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "## 🚀 Next Steps" >> $GITHUB_STEP_SUMMARY - echo "- Test the APK on your device" >> $GITHUB_STEP_SUMMARY - echo "- Submit \`.aab\` file to Google Play Console" >> $GITHUB_STEP_SUMMARY - echo "- Submit \`universal.apk\` to IzzyOnDroid" >> $GITHUB_STEP_SUMMARY - echo "- Create GitHub release with APKs for direct download" >> $GITHUB_STEP_SUMMARY diff --git a/.github/workflows/build-android-flutter.yml b/.github/workflows/build-android-flutter.yml index 6a200e00..e7db7546 100644 --- a/.github/workflows/build-android-flutter.yml +++ b/.github/workflows/build-android-flutter.yml @@ -1,5 +1,5 @@ permissions: - contents: read + contents: write name: Build Android Flutter App on: @@ -25,6 +25,9 @@ jobs: - name: Checkout repository uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch full git history for accurate commit count + token: ${{ secrets.GITHUB_TOKEN }} - name: Set up JDK 17 uses: actions/setup-java@v4 @@ -38,7 +41,7 @@ jobs: - name: Setup Flutter uses: subosito/flutter-action@v2 with: - flutter-version: "3.32.0" + flutter-version: "3.35.2" channel: "stable" - name: Install dependencies @@ -55,38 +58,51 @@ jobs: echo "storeFile=../upload-keystore.jks" >> key.properties echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > upload-keystore.jks - - name: Update app version + - name: Verify version files run: | cd mobile - # Update pubspec.yaml version to use Flutter format (version+build) - if [[ "$IMAGE_TAG" != "latest" ]]; then - # Remove 'v' prefix if present and create build number from date - VERSION_NAME=${IMAGE_TAG#v} - BUILD_NUMBER=$(date +%Y%m%d) - sed -i "s/^version: .*/version: ${VERSION_NAME}+${BUILD_NUMBER}/" pubspec.yaml - fi + echo "Current version in pubspec.yaml:" + grep "^version:" pubspec.yaml + echo "Current version in environment.dart:" + grep "_projectVersion\|_build" lib/core/environment.dart + echo "Build will use versions exactly as they are in 
the repository" + - name: Build APK run: | cd mobile flutter build apk --release --split-per-abi - # - name: Build AAB - # run: | - # cd mobile - # flutter build appbundle --release + - name: Build AAB + run: | + cd mobile + flutter build appbundle --release + + - name: Rename APK files + run: | + cd mobile/build/app/outputs/flutter-apk + # Extract version from IMAGE_TAG (remove 'v' prefix if present) + VERSION=${IMAGE_TAG#v} + if [[ "$VERSION" == "latest" ]]; then + VERSION="0.0.0" + fi + + # Rename APK files with proper naming convention + mv app-armeabi-v7a-release.apk pinepods-armeabi-${VERSION}.apk + mv app-arm64-v8a-release.apk pinepods-arm64-${VERSION}.apk + mv app-x86_64-release.apk pinepods-x86_64-${VERSION}.apk - name: Upload APK artifacts uses: actions/upload-artifact@v4 with: name: android-apk-builds - path: mobile/build/app/outputs/flutter-apk/*.apk + path: mobile/build/app/outputs/flutter-apk/pinepods-*.apk - # - name: Upload AAB artifact - # uses: actions/upload-artifact@v4 - # with: - # name: android-aab-build - # path: mobile/build/app/outputs/bundle/release/app-release.aab + - name: Upload AAB artifact + uses: actions/upload-artifact@v4 + with: + name: android-aab-build + path: mobile/build/app/outputs/bundle/release/app-release.aab # - name: Upload to Google Play Store # if: github.event_name == 'release' diff --git a/.github/workflows/build-fdroid.yml b/.github/workflows/build-fdroid.yml deleted file mode 100644 index e6287bed..00000000 --- a/.github/workflows/build-fdroid.yml +++ /dev/null @@ -1,68 +0,0 @@ -name: Build F-Droid APK - -on: - release: - types: [published] - workflow_dispatch: - inputs: - version: - description: "Manual override version tag (optional)" - required: false - -jobs: - build: - name: Build F-Droid Release - runs-on: ubuntu-latest - permissions: - contents: read - - steps: - - name: Set Image Tag - run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV - - - 
name: Checkout repository - uses: actions/checkout@v4 - - - name: Set up JDK 17 - uses: actions/setup-java@v4 - with: - java-version: "17" - distribution: "temurin" - - - name: Setup Android SDK - uses: android-actions/setup-android@v3 - - - name: Setup Flutter - uses: subosito/flutter-action@v2 - with: - flutter-version: '3.32.0' - channel: 'stable' - - - name: Install dependencies - run: | - cd mobile - flutter pub get - - - name: Update app version - run: | - cd mobile - # Update pubspec.yaml version - if [[ "$IMAGE_TAG" != "latest" ]]; then - sed -i "s/^version: .*/version: ${IMAGE_TAG#v}/" pubspec.yaml - fi - - - name: Build F-Droid APK (unsigned) - run: | - cd mobile - flutter build apk --release - - - name: Rename APK for F-Droid - run: | - cd mobile - cp build/app/outputs/flutter-apk/app-release.apk build/app/outputs/flutter-apk/PinePods-fdroid-${IMAGE_TAG#v}.apk - - - name: Upload F-Droid APK artifact - uses: actions/upload-artifact@v4 - with: - name: fdroid-apk-build - path: mobile/build/app/outputs/flutter-apk/PinePods-fdroid-*.apk \ No newline at end of file diff --git a/.github/workflows/build-flatpak.yml b/.github/workflows/build-flatpak.yml index 773d16a2..744c9684 100644 --- a/.github/workflows/build-flatpak.yml +++ b/.github/workflows/build-flatpak.yml @@ -21,7 +21,7 @@ jobs: if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }} steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Flatpak run: | diff --git a/.github/workflows/build-helm-chart.yml b/.github/workflows/build-helm-chart.yml index 2b2729b3..93e61853 100644 --- a/.github/workflows/build-helm-chart.yml +++ b/.github/workflows/build-helm-chart.yml @@ -24,15 +24,10 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: - persist-credentials: false # This prevents the default token from being persisted in the local git config - - - name: Setup Git 
for push - run: | - git config --global user.name "github-actions[bot]" - git config --global user.email "github-actions[bot]@users.noreply.github.com" - git remote set-url origin https://x-access-token:${{ secrets.PUSH_PAT }}@github.com/${{ github.repository }}.git + token: ${{ secrets.PUSH_PAT }} + persist-credentials: true - name: Setup Helm uses: Azure/setup-helm@v4.2.0 @@ -80,8 +75,8 @@ jobs: - uses: EndBug/add-and-commit@v9 with: + github_token: ${{ secrets.PUSH_PAT }} committer_name: GitHub Actions committer_email: actions@github.com message: "Update Helm chart for release ${{ github.event.release.tag_name }}" add: "docs" - push: "origin main" diff --git a/.github/workflows/build-ios-archive.yml b/.github/workflows/build-ios-archive.yml deleted file mode 100644 index be8ce4fa..00000000 --- a/.github/workflows/build-ios-archive.yml +++ /dev/null @@ -1,197 +0,0 @@ -name: Build iOS Release Archive - -permissions: - contents: read - secrets: read - actions: write - -on: - workflow_dispatch: - inputs: - version: - description: "Version tag (e.g., v0.7.9)" - required: false - default: "manual-build" - -jobs: - build-ios: - name: Build iOS Release Archive - runs-on: macOS-latest - - steps: - - name: Set Build Info - run: | - echo "VERSION_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV - echo "BUILD_NUMBER=$(date +%s)" >> $GITHUB_ENV - echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_ENV - - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Flutter - uses: subosito/flutter-action@v2 - with: - flutter-version: "3.32.0" - channel: "stable" - - - name: Install dependencies - run: | - cd mobile - flutter pub get - cd ios - pod install - - - name: Setup iOS signing - env: - IOS_CERTIFICATE_BASE64: ${{ secrets.IOS_CERTIFICATE_BASE64 }} - IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }} - IOS_PROVISIONING_PROFILE_BASE64: ${{ secrets.IOS_PROVISIONING_PROFILE_BASE64 }} - KEYCHAIN_PASSWORD: ${{ 
secrets.KEYCHAIN_PASSWORD }} - run: | - # Create keychain - security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain - security default-keychain -s build.keychain - security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain - security set-keychain-settings -t 3600 -l build.keychain - - # Import certificate - echo "$IOS_CERTIFICATE_BASE64" | base64 -d > certificate.p12 - security import certificate.p12 -P "$IOS_CERTIFICATE_PASSWORD" -A - - # Install provisioning profile - mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles - echo "$IOS_PROVISIONING_PROFILE_BASE64" | base64 -d > ~/Library/MobileDevice/Provisioning\ Profiles/build.mobileprovision - - - name: Update app version - run: | - cd mobile - if [[ "$VERSION_TAG" != "manual-build" && "$VERSION_TAG" != "" ]]; then - # Remove 'v' prefix if present - CLEAN_VERSION=${VERSION_TAG#v} - sed -i '' "s/^version: .*/version: $CLEAN_VERSION/" pubspec.yaml - echo "Updated version to: $CLEAN_VERSION" - fi - - - name: Create export options plist - run: | - cd mobile/ios - cat > exportOptions.plist << EOF - - - - - method - app-store - teamID - ${{ secrets.IOS_TEAM_ID }} - uploadBitcode - - uploadSymbols - - compileBitcode - - - - EOF - - - name: Build iOS app - run: | - cd mobile - flutter build ios --release --no-codesign - echo "iOS build completed" - - - name: Archive and sign iOS app - run: | - cd mobile/ios - xcodebuild -workspace Runner.xcworkspace \ - -scheme Runner \ - -configuration Release \ - -destination generic/platform=iOS \ - -archivePath build/Runner.xcarchive \ - archive - - xcodebuild -exportArchive \ - -archivePath build/Runner.xcarchive \ - -exportPath build \ - -exportOptionsPlist exportOptions.plist - - - name: Prepare release artifacts - run: | - cd mobile - mkdir -p ../release-artifacts/ios - - # Find and copy IPA - find ios/build -name "*.ipa" -exec cp {} ../release-artifacts/ios/PinePods-${VERSION_TAG}.ipa \; - - # Create build info file - cat > ../release-artifacts/ios/BUILD_INFO.txt 
<< EOF - PinePods Mobile - iOS Release Build - ================================== - Version: $VERSION_TAG - Build Date: $BUILD_DATE - Build Number: $BUILD_NUMBER - Flutter Version: 3.32.0 - Bundle ID: com.gooseberrydevelopment.pinepods - - 📱 IPA FILE: - =========== - PinePods-${VERSION_TAG}.ipa - iOS App Store package - - 🚀 DISTRIBUTION: - =============== - • App Store: Upload IPA to App Store Connect - • TestFlight: Upload via App Store Connect for beta testing - • Enterprise: Use enterprise provisioning profile (separate build needed) - - ⚡ UPLOAD INSTRUCTIONS: - ====================== - 1. Go to App Store Connect (appstoreconnect.apple.com) - 2. Select your app → TestFlight or App Store tab - 3. Click "+" to add new build - 4. Upload the .ipa file - 5. Wait for processing (10-30 minutes) - 6. Submit for review when ready - EOF - - - name: Upload iOS Release Artifacts - uses: actions/upload-artifact@v4 - with: - name: ios-release-${{ env.VERSION_TAG }} - path: release-artifacts/ios/ - retention-days: 90 - - - name: Create Release Summary - run: | - echo "# 🍎 iOS Release Build Complete" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Version:** ${{ github.event.inputs.version }}" >> $GITHUB_STEP_SUMMARY - echo "**Build Date:** $(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - - echo "## 📦 Generated Files" >> $GITHUB_STEP_SUMMARY - echo "- 📱 **PinePods-${{ github.event.inputs.version }}.ipa** - App Store ready package" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "## 📥 Download Instructions" >> $GITHUB_STEP_SUMMARY - echo "1. Go to **Actions** tab → This workflow run" >> $GITHUB_STEP_SUMMARY - echo "2. Scroll down to **Artifacts** section" >> $GITHUB_STEP_SUMMARY - echo "3. Download \`ios-release-${{ github.event.inputs.version }}\`" >> $GITHUB_STEP_SUMMARY - echo "4. 
Extract the ZIP file" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "## 🚀 Next Steps" >> $GITHUB_STEP_SUMMARY - echo "- Upload \`.ipa\` to App Store Connect" >> $GITHUB_STEP_SUMMARY - echo "- Submit to TestFlight for beta testing" >> $GITHUB_STEP_SUMMARY - echo "- Submit for App Store review when ready" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "## 💡 Requirements" >> $GITHUB_STEP_SUMMARY - echo "- Apple Developer Account (\$99/year)" >> $GITHUB_STEP_SUMMARY - echo "- Valid distribution certificate and provisioning profile" >> $GITHUB_STEP_SUMMARY - echo "- All iOS secrets configured in GitHub repository settings" >> $GITHUB_STEP_SUMMARY - - - name: Cleanup keychain and provisioning profile - if: always() - run: | - if security list-keychains | grep -q "build.keychain"; then - security delete-keychain build.keychain - fi - rm -f ~/Library/MobileDevice/Provisioning\ Profiles/build.mobileprovision - rm -f certificate.p12 diff --git a/.github/workflows/build-snap.yml b/.github/workflows/build-snap.yml index af8503e5..5141df1d 100644 --- a/.github/workflows/build-snap.yml +++ b/.github/workflows/build-snap.yml @@ -18,7 +18,7 @@ jobs: if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }} steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Get version id: get_version diff --git a/.github/workflows/build-tauri-clients.yml b/.github/workflows/build-tauri-clients.yml index 8de2d6ac..6d9a08e3 100644 --- a/.github/workflows/build-tauri-clients.yml +++ b/.github/workflows/build-tauri-clients.yml @@ -22,9 +22,7 @@ jobs: - windows-latest include: - os: ubuntu-arm64 - runs-on: - - runs-on=${{ github.run_id }} - - runner=4cpu-linux-arm64 + runs-on: ubuntu-24.04-arm runs-on: ${{ matrix.runs-on || matrix.os }} @@ -73,25 +71,16 @@ jobs: if: ${{ matrix.os == 'windows-latest' }} - name: Setup | Checkout - uses: actions/checkout@v3 + uses: 
actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 with: - rust-version: 1.86 + rust-version: 1.89 targets: wasm32-unknown-unknown - # Install cargo-binstall for macOS runners using direct download - - name: Install cargo-binstall on macOS - if: matrix.os == 'macos-latest' - run: | - export GITHUB_TOKEN=${{ secrets.RELEASE_TOKEN }} - curl -L https://github.com/cargo-bins/cargo-binstall/releases/download/v1.9.0/cargo-binstall-universal-apple-darwin.zip -o cargo-binstall.zip - unzip cargo-binstall.zip - ./cargo-binstall -y --force cargo-binstall - - # Install cargo-binstall for other OSes using the standard method + # Install cargo-binstall for Linux/Windows - name: Install cargo-binstall - if: matrix.os != 'macos-latest' + if: matrix.os != 'macos-latest' && matrix.os != 'macOS-13' uses: cargo-bins/cargo-binstall@main - name: Depends install @@ -105,7 +94,13 @@ jobs: run: | rustup target add wasm32-unknown-unknown - - name: Install Trunk + - name: Install Trunk (macOS) + if: matrix.os == 'macos-latest' || matrix.os == 'macOS-13' + run: | + brew install trunk + + - name: Install Trunk (Linux/Windows) + if: matrix.os != 'macos-latest' && matrix.os != 'macOS-13' run: | cargo binstall trunk -y @@ -143,9 +138,10 @@ jobs: - name: Build | Compile (UNIX) run: | - cd web/src-tauri + cd web + RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build + cd src-tauri cat tauri.conf.json - export RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" cargo tauri build pwd ls @@ -155,9 +151,10 @@ jobs: - name: Build | Compile (Windows) run: | - cd web/src-tauri + cd web + powershell -ExecutionPolicy Bypass -File .\build.ps1 + cd src-tauri Get-Content tauri.conf.json - $env:RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=`"wasm_js`"" cargo tauri build ls target/release/bundle shell: pwsh diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index d17fe3da..58bbc8be 100644 --- 
a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -26,7 +26,7 @@ jobs: --health-retries 5 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 @@ -56,11 +56,11 @@ jobs: frontend-tests: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: hecrj/setup-rust-action@v2 with: - rust-version: 1.86 + rust-version: 1.89 targets: wasm32-unknown-unknown # Install cargo-binstall for other OSes using the standard method diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml index 7da34ba1..6ce793ef 100644 --- a/.github/workflows/docker-publish.yml +++ b/.github/workflows/docker-publish.yml @@ -36,7 +36,7 @@ jobs: id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - name: Log in to Docker Hub @@ -55,16 +55,14 @@ jobs: build-and-push-arm64: needs: set-env - runs-on: - - runs-on=${{ github.run_id }} - - runner=4cpu-linux-arm64 + runs-on: ubuntu-24.04-arm permissions: contents: read packages: write id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - name: Log in to Docker Hub @@ -90,7 +88,7 @@ jobs: id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 - name: Log in to Docker Hub diff --git a/.github/workflows/nightly-docker-publish.yml b/.github/workflows/nightly-docker-publish.yml index 6b098662..57a298bb 100644 --- a/.github/workflows/nightly-docker-publish.yml +++ b/.github/workflows/nightly-docker-publish.yml @@ -19,7 +19,7 @@ jobs: id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: 
docker/setup-buildx-action@v2 @@ -39,16 +39,14 @@ jobs: run: echo ${{ steps.docker_build.outputs.digest }} build-and-push-nightly-arm64: - runs-on: - - runs-on=${{ github.run_id }} - - runner=2cpu-linux-arm64 + runs-on: ubuntu-24.04-arm permissions: contents: read packages: write id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 @@ -76,7 +74,7 @@ jobs: id-token: write steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v2 diff --git a/.github/workflows/notification.yml b/.github/workflows/notification.yml index e7884a34..dbd9ea93 100644 --- a/.github/workflows/notification.yml +++ b/.github/workflows/notification.yml @@ -21,7 +21,7 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Fetch the latest release id: fetch_release @@ -32,6 +32,14 @@ jobs: echo "Release URL: $release_url" echo "::set-output name=version::$latest_release" echo "::set-output name=release_url::$release_url" + + # Check if this is an RC release + if [[ "$latest_release" == *"-rc"* ]]; then + echo "RC release detected, skipping Discord notification" + echo "::set-output name=is_rc::true" + else + echo "::set-output name=is_rc::false" + fi - name: Set release message id: set_message @@ -45,7 +53,13 @@ jobs: echo "::set-output name=message::$message" fi + - name: Skip Discord notification for RC release + if: steps.fetch_release.outputs.is_rc == 'true' + run: | + echo "Skipping Discord notification for RC release: ${{ steps.fetch_release.outputs.version }}" + - name: Discord notification to announce deployment + if: steps.fetch_release.outputs.is_rc == 'false' env: DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }} uses: Ilshidur/action-discord@master diff --git a/.github/workflows/pre-release-version-update.yml 
b/.github/workflows/pre-release-version-update.yml new file mode 100644 index 00000000..c360c0ca --- /dev/null +++ b/.github/workflows/pre-release-version-update.yml @@ -0,0 +1,53 @@ +name: Pre-Release Version Update + +on: + workflow_dispatch: + inputs: + version: + description: "Version to set (e.g., 0.8.0)" + required: true + type: string + +jobs: + update-version: + name: Update Version Files + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Update app version + run: | + cd mobile + VERSION_NAME=${{ github.event.inputs.version }} + # Calculate what the git count WILL BE after we commit (current + 1) + BUILD_NUMBER=$(($(git rev-list --count HEAD) + 1 + 20250000)) + + # Update pubspec.yaml version + sed -i "s/^version: .*/version: ${VERSION_NAME}+${BUILD_NUMBER}/" pubspec.yaml + + # Update environment.dart constants + sed -i "s/static const _projectVersion = '[^']*';/static const _projectVersion = '${VERSION_NAME}';/" lib/core/environment.dart + sed -i "s/static const _build = '[^']*';/static const _build = '${BUILD_NUMBER}';/" lib/core/environment.dart + + echo "Updated version to ${VERSION_NAME}+${BUILD_NUMBER}" + + - name: Commit and push version update + run: | + git config --local user.email "action@github.com" + git config --local user.name "GitHub Action" + git add mobile/pubspec.yaml mobile/lib/core/environment.dart + git commit -m "chore: update version to ${{ github.event.inputs.version }} [skip ci]" + git push + + - name: Summary + run: | + echo "✅ Version updated to ${{ github.event.inputs.version }}" + echo "📋 Next steps:" + echo "1. Create a GitHub release pointing to the latest commit" + echo "2. The release workflow will build from that exact commit" + echo "3. 
Version files will match the commit for reproducible builds" \ No newline at end of file diff --git a/.github/workflows/test-ios-app.yml b/.github/workflows/test-ios-app.yml deleted file mode 100644 index 3bece6ec..00000000 --- a/.github/workflows/test-ios-app.yml +++ /dev/null @@ -1,122 +0,0 @@ -name: Build IOS Pinepods App (Legacy Tauri - Deprecated) - -on: - # This workflow is deprecated in favor of build-ios-flutter.yml - # release: - # types: [published] - workflow_dispatch: - inputs: - version: - description: "Manual override version tag (optional)" - required: false - -jobs: - build: - name: Build ios Release - runs-on: macOS-latest - - steps: - - name: Set Image Tag (Unix) - run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV - - - name: Setup | Checkout - uses: actions/checkout@v3 - - - uses: hecrj/setup-rust-action@v2 - with: - rust-version: 1.86 - targets: wasm32-unknown-unknown - - - name: Install cargo-binstall - uses: cargo-bins/cargo-binstall@main - - - name: Depends install - if: ${{ env.DEPENDS_SETUP == 'true' }} - run: | - sudo apt update - sudo apt install -qy libgtk-3-dev - sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf - - - name: wasm-addition - run: | - rustup target add wasm32-unknown-unknown - - - name: Install Trunk - run: | - cargo binstall trunk -y - - - name: Install Tauri - run: | - cargo install tauri-cli@2.0.0-rc.16 --locked - - name: Update Tauri version - run: | - cd web/src-tauri - sed -i '' "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json - cat tauri.conf.json - - - name: Build iOS app - run: | - cd web/src-tauri - cargo tauri icon icons/Square1024x1024.png - cargo tauri ios init - cargo tauri ios build - cargo tauri icon src-tauri/icons/Square1024x1024.png - cargo tauri ios build - # --release --export-method app-store-connect - - name: Upload IPA - uses: 
actions/upload-artifact@v4 - with: - name: Pinepods-iOS - path: web/src-tauri/gen/apple/build/arm64/*.ipa - - # - name: Upload to App Store Connect - # env: - # APPLE_API_KEY_ID: ${{ secrets.APPLE_API_KEY_ID }} - # APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }} - # run: | - # xcrun altool --upload-app --type ios --file "web/src-tauri/gen/apple/build/arm64/*.ipa" --apiKey $APPLE_API_KEY_ID --apiIssuer $APPLE_API_ISSUER - - # - name: Cleanup keychain and provisioning profile - # if: ${{ always() }} - # run: | - # security delete-keychain $RUNNER_TEMP/app-signing.keychain-db - # rm ~/Library/MobileDevice/Provisioning\ Profiles/build_pp.mobileprovision - - # - name: Archive build 1 (apk) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Archive build 2 (aab) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Archive build 2 (Ubuntu) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/target/release/bundle/appimage/${{ env.ARTIFACT_NAME2 }} - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Archive build 3 (Ubuntu) - # uses: actions/upload-artifact@v3 - # with: - # name: ${{ matrix.os }}-build - # path: ./web/src-tauri/target/release/bundle/rpm/${{ env.ARTIFACT_NAME3 }} - # if: ${{ matrix.os == 'ubuntu-latest' }} - - # - name: Upload release asset (Ubuntu - DEB) - # if: github.event_name == 'release' && matrix.os == 'ubuntu-latest' - # uses: actions/upload-release-asset@v1 - # env: - # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # with: - # upload_url: ${{ github.event.release.upload_url }} - # asset_path: 
./web/src-tauri/target/release/bundle/deb/${{ env.ARTIFACT_NAME1 }} - # asset_name: ${{ env.ARTIFACT_NAME1 }} - # asset_content_type: application/vnd.debian.binary-package diff --git a/.github/workflows/test-pinepods.yml b/.github/workflows/test-pinepods.yml index fbbc8e62..cf01dfa3 100644 --- a/.github/workflows/test-pinepods.yml +++ b/.github/workflows/test-pinepods.yml @@ -11,7 +11,7 @@ jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Build the Docker test container run: docker build -t madeofpendletonwool/pinepods-test . -f dockerfile-test - uses: rustsec/audit-check@v1.4.1 @@ -37,5 +37,5 @@ jobs: - uses: taiki-e/cache-cargo-install-action@v1 with: tool: cargo-checkmate - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - run: cargo-checkmate run ${{ matrix.phase }} \ No newline at end of file diff --git a/.github/workflows/update-aur-package.yml b/.github/workflows/update-aur-package.yml index 462a0aa2..38f2d7f8 100644 --- a/.github/workflows/update-aur-package.yml +++ b/.github/workflows/update-aur-package.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set version run: | diff --git a/.gitignore b/.gitignore index 596291b7..50518385 100644 --- a/.gitignore +++ b/.gitignore @@ -88,6 +88,8 @@ clients/mac-app/pinepods.spec web/target/* web/.idea/* keystore.properties +key.properties +**/key.properties # Virtual Environment diff --git a/Backend/pinepods_backend/Cargo.lock b/Backend/pinepods_backend/Cargo.lock new file mode 100644 index 00000000..3411cf4a --- /dev/null +++ b/Backend/pinepods_backend/Cargo.lock @@ -0,0 +1,2712 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "actix-codec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a" +dependencies = [ + "bitflags", + "bytes", + "futures-core", + "futures-sink", + "memchr", + "pin-project-lite", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "actix-cors" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" +dependencies = [ + "actix-utils", + "actix-web", + "derive_more", + "futures-util", + "log", + "once_cell", + "smallvec", +] + +[[package]] +name = "actix-http" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44dfe5c9e0004c623edc65391dfd51daa201e7e30ebd9c9bedf873048ec32bc2" +dependencies = [ + "actix-codec", + "actix-rt", + "actix-service", + "actix-utils", + "base64", + "bitflags", + "brotli", + "bytes", + "bytestring", + "derive_more", + "encoding_rs", + "flate2", + "foldhash", + "futures-core", + "h2 0.3.27", + "http 0.2.12", + "httparse", + "httpdate", + "itoa", + "language-tags", + "local-channel", + "mime", + "percent-encoding", + "pin-project-lite", + "rand", + "sha1", + "smallvec", + "tokio", + "tokio-util", + "tracing", + "zstd", +] + +[[package]] +name = "actix-macros" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" +dependencies = [ + "quote", + "syn", +] + +[[package]] +name = "actix-router" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8" +dependencies = [ + "bytestring", + "cfg-if", + "http 0.2.12", + "regex", + "regex-lite", + "serde", + "tracing", +] + +[[package]] +name = "actix-rt" +version = "2.10.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208" +dependencies = [ + "futures-core", + "tokio", +] + +[[package]] +name = "actix-server" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502" +dependencies = [ + "actix-rt", + "actix-service", + "actix-utils", + "futures-core", + "futures-util", + "mio", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "actix-service" +version = "2.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f" +dependencies = [ + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "actix-utils" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8" +dependencies = [ + "local-waker", + "pin-project-lite", +] + +[[package]] +name = "actix-web" +version = "4.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea" +dependencies = [ + "actix-codec", + "actix-http", + "actix-macros", + "actix-router", + "actix-rt", + "actix-server", + "actix-service", + "actix-utils", + "actix-web-codegen", + "bytes", + "bytestring", + "cfg-if", + "cookie", + "derive_more", + "encoding_rs", + "foldhash", + "futures-core", + "futures-util", + "impl-more", + "itoa", + "language-tags", + "log", + "mime", + "once_cell", + "pin-project-lite", + "regex", + "regex-lite", + "serde", + "serde_json", + "serde_urlencoded", + "smallvec", + "socket2 0.5.10", + "time", + "tracing", + "url", +] + +[[package]] +name = "actix-web-codegen" +version = "4.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8" +dependencies = [ + "actix-router", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "addr2line" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" +dependencies = [ + "gimli", +] + +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "alloc-no-stdlib" +version = "2.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3" + +[[package]] +name = "alloc-stdlib" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece" +dependencies = [ + "alloc-no-stdlib", +] + +[[package]] +name = "android-tzdata" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" + +[[package]] +name = "android_system_properties" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311" +dependencies = [ + "libc", +] + +[[package]] +name = "anstream" +version = "0.6.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + 
"colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd" + +[[package]] +name = "anstyle-parse" +version = "0.2.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2" +dependencies = [ + "windows-sys 0.60.2", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys 0.60.2", +] + +[[package]] +name = "atomic-waker" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "backtrace" +version = "0.3.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" +dependencies = [ + "addr2line", + "cfg-if", + "libc", + "miniz_oxide", + "object", + "rustc-demangle", + "windows-targets 0.52.6", +] + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + +[[package]] +name = "bitflags" 
+version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "brotli" +version = "8.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", + "brotli-decompressor", +] + +[[package]] +name = "brotli-decompressor" +version = "5.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03" +dependencies = [ + "alloc-no-stdlib", + "alloc-stdlib", +] + +[[package]] +name = "bumpalo" +version = "3.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + +[[package]] +name = "bytes" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" + +[[package]] +name = "bytestring" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f" +dependencies = [ + "bytes", +] + +[[package]] +name = "cc" +version = "1.2.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2352e5597e9c544d5e6d9c95190d5d27738ade584fa8db0a16e130e5c2b5296e" +dependencies = [ + "jobserver", + "libc", + "shlex", +] + +[[package]] +name = "cfg-if" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +dependencies = [ + "android-tzdata", + "iana-time-zone", + "js-sys", + "num-traits", + "serde", + "wasm-bindgen", + "windows-link", +] + +[[package]] +name = "colorchoice" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" + +[[package]] +name = "cookie" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "core-foundation" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "core-foundation-sys" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" + +[[package]] +name = "cpufeatures" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" +dependencies = [ + "libc", +] + +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + 
"cfg-if", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "deranged" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +dependencies = [ + "powerfmt", +] + +[[package]] +name = "derive_more" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "unicode-xid", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "displaydoc" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "dotenvy" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" + +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + +[[package]] +name = 
"env_filter" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0" +dependencies = [ + "log", + "regex", +] + +[[package]] +name = "env_logger" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f" +dependencies = [ + "anstream", + "anstyle", + "env_filter", + "jiff", + "log", +] + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "errno" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +dependencies = [ + "libc", + "windows-sys 0.60.2", +] + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "flate2" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + +[[package]] +name = "fnv" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" + +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" 
+dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + +[[package]] +name = "form_urlencoded" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "futures-channel" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" +dependencies = [ + "futures-core", +] + +[[package]] +name = "futures-core" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" + +[[package]] +name = "futures-sink" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" + +[[package]] +name = "futures-task" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" + +[[package]] +name = "futures-util" +version = "0.3.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" +dependencies = [ + "futures-core", + "futures-task", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "generic-array" +version = "0.14.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "getrandom" +version = "0.2.16" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "wasi 0.11.1+wasi-snapshot-preview1", + "wasm-bindgen", +] + +[[package]] +name = "getrandom" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +dependencies = [ + "cfg-if", + "js-sys", + "libc", + "r-efi", + "wasi 0.14.2+wasi-0.2.4", + "wasm-bindgen", +] + +[[package]] +name = "gimli" +version = "0.31.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" + +[[package]] +name = "h2" +version = "0.3.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" +dependencies = [ + "bytes", + "fnv", + "futures-core", + "futures-sink", + "futures-util", + "http 0.2.12", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.3.1", + "indexmap", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "hashbrown" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" + +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http" +version = "1.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + +[[package]] +name = "http-body" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" +dependencies = [ + "bytes", + "http 1.3.1", +] + +[[package]] +name = "http-body-util" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" +dependencies = [ + "bytes", + "futures-core", + "http 1.3.1", + "http-body", + "pin-project-lite", +] + +[[package]] +name = "httparse" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" + +[[package]] +name = "httpdate" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" + +[[package]] +name = "hyper" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.12", + "http 1.3.1", + "http-body", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" +dependencies = [ + "http 1.3.1", + "hyper", + "hyper-util", + "rustls", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", + "webpki-roots", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +dependencies = [ + "base64", + "bytes", + "futures-channel", + "futures-core", + "futures-util", + "http 1.3.1", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", + "pin-project-lite", + "socket2 0.6.0", + "system-configuration", + "tokio", + "tower-service", + "tracing", + "windows-registry", +] + +[[package]] +name = "iana-time-zone" +version = "0.1.63" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +dependencies = [ + "android_system_properties", + "core-foundation-sys", + "iana-time-zone-haiku", + "js-sys", + "log", + "wasm-bindgen", + "windows-core", +] + +[[package]] +name = "iana-time-zone-haiku" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f" +dependencies = [ + "cc", +] + +[[package]] +name = "icu_collections" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" +dependencies = [ + "displaydoc", + "potential_utf", + "yoke", + "zerofrom", + "zerovec", +] + +[[package]] +name = "icu_locale_core" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" +dependencies = [ + "displaydoc", + "litemap", + "tinystr", + "writeable", + "zerovec", +] 
+ +[[package]] +name = "icu_normalizer" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_normalizer_data", + "icu_properties", + "icu_provider", + "smallvec", + "zerovec", +] + +[[package]] +name = "icu_normalizer_data" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" + +[[package]] +name = "icu_properties" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" +dependencies = [ + "displaydoc", + "icu_collections", + "icu_locale_core", + "icu_properties_data", + "icu_provider", + "potential_utf", + "zerotrie", + "zerovec", +] + +[[package]] +name = "icu_properties_data" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" + +[[package]] +name = "icu_provider" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" +dependencies = [ + "displaydoc", + "icu_locale_core", + "stable_deref_trait", + "tinystr", + "writeable", + "yoke", + "zerofrom", + "zerotrie", + "zerovec", +] + +[[package]] +name = "idna" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +dependencies = [ + "idna_adapter", + "smallvec", + "utf8_iter", +] + +[[package]] +name = "idna_adapter" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" +dependencies = [ + 
"icu_normalizer", + "icu_properties", +] + +[[package]] +name = "impl-more" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + +[[package]] +name = "indexmap" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "io-uring" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +dependencies = [ + "bitflags", + "cfg-if", + "libc", +] + +[[package]] +name = "ipnet" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" + +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "jiff" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49" +dependencies = [ + "jiff-static", + "log", + "portable-atomic", + "portable-atomic-util", + "serde", +] + +[[package]] +name = "jiff-static" +version = "0.2.15" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "jobserver" +version = "0.1.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +dependencies = [ + "getrandom 0.3.3", + "libc", +] + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "language-tags" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388" + +[[package]] +name = "libc" +version = "0.2.175" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" + +[[package]] +name = "linux-raw-sys" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" + +[[package]] +name = "litemap" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" + +[[package]] +name = "local-channel" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" +dependencies = [ + "futures-core", + "futures-sink", + "local-waker", +] + +[[package]] +name = "local-waker" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" + +[[package]] +name = 
"lock_api" +version = "0.4.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" +dependencies = [ + "autocfg", + "scopeguard", +] + +[[package]] +name = "log" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "lru-slab" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "mime" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" + +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", +] + +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "log", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", +] + +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + +[[package]] +name = "num-conv" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "object" +version = "0.36.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" +dependencies = [ + "memchr", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad" + +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "openssl-probe" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" + +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "parking_lot" +version = "0.12.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.9.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" +dependencies = [ + "cfg-if", + "libc", + "redox_syscall", + "smallvec", + "windows-targets 0.52.6", +] + +[[package]] +name = "percent-encoding" +version = "2.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pin-project-lite" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "pinepods_backend" +version = "0.1.0" +dependencies = [ + "actix-cors", + "actix-web", + "chrono", + "dotenvy", + "env_logger", + "log", + "reqwest", + "serde", + "serde_json", + "sha1", + "urlencoding", +] + +[[package]] +name = "pkg-config" +version = "0.3.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" + +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + +[[package]] +name = "portable-atomic-util" 
+version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507" +dependencies = [ + "portable-atomic", +] + +[[package]] +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + +[[package]] +name = "ppv-lite86" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" +dependencies = [ + "zerocopy", +] + +[[package]] +name = "proc-macro2" +version = "1.0.96" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "beef09f85ae72cea1ef96ba6870c51e6382ebfa4f0e85b643459331f3daa5be0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quinn" +version = "0.11.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8" +dependencies = [ + "bytes", + "cfg_aliases", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash", + "rustls", + "socket2 0.5.10", + "thiserror", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-proto" +version = "0.11.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e" +dependencies = [ + "bytes", + "getrandom 0.3.3", + "lru-slab", + "rand", + "ring", + "rustc-hash", + "rustls", + "rustls-pki-types", + "slab", + "thiserror", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2 0.5.10", + "tracing", + "windows-sys 0.59.0", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "r-efi" +version = "5.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" + +[[package]] +name = "rand" +version = "0.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" +dependencies = [ + "rand_chacha", + "rand_core", +] + +[[package]] +name = "rand_chacha" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb" +dependencies = [ + "ppv-lite86", + "rand_core", +] + +[[package]] +name = "rand_core" +version = "0.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" +dependencies = [ + "getrandom 0.3.3", +] + +[[package]] +name = "redox_syscall" +version = "0.5.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" +dependencies = [ + "bitflags", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = 
"0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-lite" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a" + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "reqwest" +version = "0.12.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +dependencies = [ + "base64", + "bytes", + "encoding_rs", + "futures-core", + "h2 0.4.12", + "http 1.3.1", + "http-body", + "http-body-util", + "hyper", + "hyper-rustls", + "hyper-tls", + "hyper-util", + "js-sys", + "log", + "mime", + "native-tls", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls", + "rustls-pki-types", + "serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper", + "tokio", + "tokio-native-tls", + "tokio-rustls", + "tower", + "tower-http", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "webpki-roots", +] + +[[package]] +name = "ring" +version = "0.17.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" +dependencies = [ + "cc", + "cfg-if", + "getrandom 0.2.16", + "libc", + "untrusted", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustc-demangle" +version = "0.1.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" + +[[package]] +name = "rustc-hash" +version = "2.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" + +[[package]] +name = "rustix" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.60.2", +] + +[[package]] +name = "rustls" +version = "0.23.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" +dependencies = [ + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki", + "subtle", + "zeroize", +] + +[[package]] +name = "rustls-pki-types" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "web-time", + "zeroize", +] + +[[package]] +name = "rustls-webpki" +version = "0.103.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "schannel" +version = "0.1.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" +dependencies = [ + "windows-sys 0.59.0", +] + +[[package]] +name = "scopeguard" +version = "1.2.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" + +[[package]] +name = "security-framework" +version = "2.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" +dependencies = [ + "bitflags", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.142" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "serde_urlencoded" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +dependencies = [ + "form_urlencoded", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + 
"cpufeatures", + "digest", +] + +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + +[[package]] +name = "signal-hook-registry" +version = "1.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" +dependencies = [ + "libc", +] + +[[package]] +name = "slab" +version = "0.4.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" + +[[package]] +name = "smallvec" +version = "1.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + +[[package]] +name = "stable_deref_trait" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +dependencies = [ + "proc-macro2", 
+ "quote", + "unicode-ident", +] + +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + +[[package]] +name = "synstructure" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tempfile" +version = "3.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +dependencies = [ + "fastrand", + "getrandom 0.3.3", + "once_cell", + "rustix", + "windows-sys 0.59.0", +] + +[[package]] +name = "thiserror" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "time" +version = "0.3.41" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +dependencies = [ + "deranged", + "itoa", + "num-conv", + "powerfmt", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" + +[[package]] +name = "time-macros" +version = "0.2.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +dependencies = [ + "num-conv", + "time-core", +] + +[[package]] +name = "tinystr" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +dependencies = [ + "displaydoc", + "zerovec", +] + +[[package]] +name = "tinyvec" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tokio" +version = "1.47.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" +dependencies = [ + "backtrace", + "bytes", + "io-uring", + "libc", + "mio", + "parking_lot", + "pin-project-lite", + "signal-hook-registry", + "slab", + "socket2 0.6.0", + "windows-sys 0.59.0", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" 
+dependencies = [ + "native-tls", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +dependencies = [ + "rustls", + "tokio", +] + +[[package]] +name = "tokio-util" +version = "0.7.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" +dependencies = [ + "bytes", + "futures-core", + "futures-sink", + "pin-project-lite", + "tokio", +] + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http 1.3.1", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + +[[package]] +name = "tower-service" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" + +[[package]] +name = "tracing" +version = "0.1.41" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +dependencies = [ + "log", + "pin-project-lite", + "tracing-attributes", + 
"tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "tracing-core" +version = "0.1.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +dependencies = [ + "once_cell", +] + +[[package]] +name = "try-lock" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" + +[[package]] +name = "typenum" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + +[[package]] +name = "url" +version = "2.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +dependencies = [ + "form_urlencoded", + "idna", + "percent-encoding", +] + +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + 
+[[package]] +name = "utf8_iter" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "want" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e" +dependencies = [ + "try-lock", +] + +[[package]] +name = "wasi" +version = "0.11.1+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" + +[[package]] +name = "wasi" +version = "0.14.2+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +dependencies = [ + "wit-bindgen-rt", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" 
+dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-futures" +version = "0.4.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +dependencies = [ + "cfg-if", + "js-sys", + "once_cell", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + +[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "webpki-roots" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +dependencies = [ + "rustls-pki-types", 
+] + +[[package]] +name = "windows-core" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +dependencies = [ + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-interface" +version = "0.59.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "windows-link" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" + +[[package]] +name = "windows-registry" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" +dependencies = [ + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-result" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.60.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" +dependencies = [ + "windows-targets 0.53.3", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm 0.52.6", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", +] + +[[package]] +name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnu" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_i686_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnu" 
+version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" + +[[package]] +name = "wit-bindgen-rt" +version = "0.39.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" +dependencies = [ + "bitflags", +] + +[[package]] +name = "writeable" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" + +[[package]] +name = "yoke" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +dependencies = [ + "serde", + "stable_deref_trait", + "yoke-derive", + "zerofrom", +] + +[[package]] +name = "yoke-derive" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +dependencies = [ + "proc-macro2", + "quote", + "syn", + 
"synstructure", +] + +[[package]] +name = "zerocopy" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zerofrom" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" +dependencies = [ + "zerofrom-derive", +] + +[[package]] +name = "zerofrom-derive" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "synstructure", +] + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" + +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + +[[package]] +name = "zerovec" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" +dependencies = [ + "yoke", + "zerofrom", + "zerovec-derive", +] + +[[package]] +name = "zerovec-derive" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +dependencies = [ + 
"proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "zstd" +version = "0.13.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.15+zstd.1.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" +dependencies = [ + "cc", + "pkg-config", +] diff --git a/Backend/pinepods_backend/Cargo.toml b/Backend/pinepods_backend/Cargo.toml index cd683e4b..773afca6 100644 --- a/Backend/pinepods_backend/Cargo.toml +++ b/Backend/pinepods_backend/Cargo.toml @@ -7,13 +7,13 @@ edition = "2021" [dependencies] actix-web = "4.11.0" -serde = { version = "1.0.209", features = ["derive"] } -serde_json = "1.0.141" -reqwest = { version = "0.12.22", features = ["json", "rustls-tls"] } +serde = { version = "1.0.225", features = ["derive"] } +serde_json = "1.0.145" +reqwest = { version = "0.12.23", features = ["json", "rustls-tls"] } env_logger = "0.11.8" -log = "0.4.27" +log = "0.4.28" dotenvy = "0.15.7" sha1 = "0.10.6" urlencoding = "2.1.3" actix-cors = "0.7.1" -chrono = { version = "0.4.41", features = ["serde"] } +chrono = { version = "0.4.42", features = ["serde"] } diff --git a/Dockerfile.validator b/Dockerfile.validator new file mode 100644 index 00000000..c99992a7 --- /dev/null +++ b/Dockerfile.validator @@ -0,0 +1,28 @@ +FROM python:3.11-slim + +# Install PostgreSQL dev libraries and required packages +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Install required packages +RUN pip install psycopg[binary] 
mysql-connector-python cryptography passlib argon2-cffi + +# Copy validation scripts +COPY database_functions/ /app/database_functions/ +COPY validate_db.py /app/ + +# Set working directory +WORKDIR /app + +# Set default environment variables for MySQL (TEST ONLY - NOT SECURE) +ENV DB_TYPE=mysql +ENV DB_HOST=mysql_db +ENV DB_PORT=3306 +ENV DB_USER=root +ENV DB_PASSWORD=test_password_123 +ENV DB_NAME=pinepods_database + +# Run validator +CMD ["python", "validate_db.py", "--verbose"] \ No newline at end of file diff --git a/Dockerfile.validator.postgres b/Dockerfile.validator.postgres new file mode 100644 index 00000000..d733ca8d --- /dev/null +++ b/Dockerfile.validator.postgres @@ -0,0 +1,28 @@ +FROM python:3.11-slim + +# Install PostgreSQL dev libraries and required packages +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +# Install required packages +RUN pip install psycopg[binary] mysql-connector-python cryptography passlib argon2-cffi + +# Copy validation scripts +COPY database_functions/ /app/database_functions/ +COPY validate_db.py /app/ + +# Set working directory +WORKDIR /app + +# Set default environment variables for PostgreSQL (TEST ONLY - NOT SECURE) +ENV DB_TYPE=postgresql +ENV DB_HOST=postgres_db +ENV DB_PORT=5432 +ENV DB_USER=postgres +ENV DB_PASSWORD=test_password_123 +ENV DB_NAME=pinepods_database + +# Run validator +CMD ["python", "validate_db.py", "--verbose"] \ No newline at end of file diff --git a/README.md b/README.md index 4f692051..bef12c7d 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ # Getting Started -PinePods is a Rust based podcast management system that manages podcasts with multi-user support and relies on a central database with clients to connect to it. It's browser based and your podcasts and settings follow you from device to device due to everything being stored on the server. You can subscribe to podcasts and even hosts for podcasts with the help of the PodPeopleDB. 
It works on mobile devices and can also sync with a Nextcloud server or gpodder compatible sync server so you can use external apps like Antennapod as well! +PinePods is a Rust based podcast management system that manages podcasts with multi-user support and relies on a central database with clients to connect to it. It's browser based and your podcasts and settings follow you from device to device due to everything being stored on the server. You can subscribe to podcasts and even hosts for podcasts with the help of the PodPeopleDB. It has a native mobile app for Ios and Android and comes prebaked with it own internal gpodder server so you can use external apps like Antennapod as well! For more information than what's provided in this repo visit the [documentation site](https://www.pinepods.online/). @@ -46,7 +46,9 @@ For more information than what's provided in this repo visit the [documentation ## Features -Pinepods is a complete podcast management system and allows you to play, download, and keep track of podcasts you (or any of your users) enjoy. It allows for searching and subscribing to hosts and podcasts using The Podcast Index or Itunes and provides a modern looking UI to browse through shows and episodes. In addition, Pinepods provides simple user management and can be used by multiple users at once using a browser or app version. Everything is saved into a MySQL or Postgres database including user settings, podcasts and episodes. It's fully self-hosted, open-sourced, and I provide an option to use a hosted search API or you can also get one from the Podcast Index and use your own. There's even many different themes to choose from! Everything is fully dockerized and I provide a simple guide found below explaining how to install and run Pinepods on your own system. +Pinepods is a complete podcast management system and allows you to play, download, and keep track of podcasts you (or any of your users) enjoy. 
It allows for searching and subscribing to hosts and podcasts using The Podcast Index or Itunes and provides a modern looking UI to browse through shows and episodes. In addition, Pinepods provides simple user management and can be used by multiple users at once using a browser or app version. Everything is saved into a MySQL, MariaDB, or Postgres database including user settings, podcasts and episodes. It's fully self-hosted, open-sourced, and I provide an option to use a hosted search API or you can also get one from the Podcast Index and use your own. There's even many different themes to choose from! Everything is fully dockerized and I provide a simple guide found below explaining how to install and run Pinepods on your own system. + +There's plenty more features as well, check out the [Pinepods Site](https://www.pinepods.online/docs/Features/smart-playlists) for more! ## Try it out! :zap: @@ -67,6 +69,13 @@ You can also choose to use MySQL/MariaDB or Postgres as your database. Examples ### Docker Compose +> **⚠️ WARNING:** An issue was recently pointed out to me related to postgres version 18. If you run into an error that looks like this on startup when using postgres: + +``` +Failed to deploy a stack: compose up operation failed: Error response from daemon: failed to create task for container: failed to create shim task: OCI runtime create failed: runc create failed: unable to start container process: error during container init: error mounting "" to rootfs at "/var/lib/postgresql/data": change mount propagation through procfd: open o_path procfd: open //overlay2/17561d31d0730b3fd3071752d82cf8fe60b2ea0ed84521c6ee8b06427ca8f064/merged/var/lib/postgresql/data: no such file or directory: unknown` +``` +> Please change your postgres tag in your compose to '17'. See [this issue](https://github.com/docker-library/postgres/issues/1363) for more details. 
+ #### User Permissions Pinepods can run with specific user permissions to ensure downloaded files are accessible on the host system. This is controlled through two environment variables: - `PUID`: Process User ID (defaults to 1000 if not set) @@ -83,7 +92,7 @@ id -g # Your GID services: db: container_name: db - image: postgres:latest + image: postgres:17 environment: POSTGRES_DB: pinepods_database POSTGRES_USER: postgres @@ -91,14 +100,11 @@ services: PGDATA: /var/lib/postgresql/data/pgdata volumes: - /home/user/pinepods/pgdata:/var/lib/postgresql/data - ports: - - "5432:5432" restart: always valkey: image: valkey/valkey:8-alpine - ports: - - "6379:6379" + restart: always pinepods: image: madeofpendletonwool/pinepods:latest @@ -132,6 +138,7 @@ services: # Timezone volumes, HIGHLY optional. Read the timezone notes below - /etc/localtime:/etc/localtime:ro - /etc/timezone:/etc/timezone:ro + restart: always depends_on: - db - valkey @@ -142,7 +149,7 @@ services: services: db: container_name: db - image: mariadb:latest + image: mariadb:12 command: --wait_timeout=1800 environment: MYSQL_TCP_PORT: 3306 @@ -153,14 +160,10 @@ services: MYSQL_INIT_CONNECT: 'SET @@GLOBAL.max_allowed_packet=64*1024*1024;' volumes: - /home/user/pinepods/sql:/var/lib/mysql - ports: - - "3306:3306" restart: always valkey: image: valkey/valkey:8-alpine - ports: - - "6379:6379" pinepods: image: madeofpendletonwool/pinepods:latest @@ -223,12 +226,12 @@ Most of those are pretty obvious, but let's break a couple of them down. #### Admin User Info -First of all, the USERNAME, PASSWORD, FULLNAME, and EMAIL vars are your details for your default admin account. This account will have admin credentials and will be able to log in right when you start up the app. Once started you'll be able to create more users and even more admins but you need an account to kick things off on. 
If you don't specify credentials in the compose file it will create an account with a random password for you but I would recommend just creating one for yourself. +First of all, the USERNAME, PASSWORD, FULLNAME, and EMAIL vars are your details for your default admin account. This account will have admin credentials and will be able to log in right when you start up the app. Once started you'll be able to create more users and even more admins but you need an account to kick things off on. If you don't specify credentials in the compose file it will prompt you to create an account before first login. #### Note on the Search API -Let's talk quickly about the searching API. This allows you to search for new podcasts and it queries either itunes or the podcast index for new podcasts. The podcast index requires an api key while itunes does not. If you'd rather not mess with the api at all simply set the API_URL to the one below. +Let's talk quickly about the searching API. This allows you to search for new podcasts and it queries either itunes or the podcast index for new podcasts. It also allows for searching youtube channels via the Google Search API. The podcast index and Google Search require an api key while itunes does not. If you'd rather not mess with the api at all simply set the API_URL to the one below, however, know that Google implements a limit per day on youtube searches and the search api that I maintain below hits it's limit pretty quick. So if you're a big youtube user you might want to host your own. ``` SEARCH_API_URL: 'https://search.pinepods.online/api/search' @@ -506,6 +509,10 @@ paru -S pinepods #### Flatpak + + Get it on Flathub + + You can search for Pinepods in your favorite flatpak installer gui app such as Gnome Software. Flathub page can be found [here](https://flathub.org/apps/com.gooseberrydevelopment.pinepods) @@ -520,7 +527,7 @@ I have had such a nightmare trying to make the snap client work. 
Pass, use the f ### Windows Client Install :computer: -Any of the client additions are super easy to get going. First head over to the releases page on Github +First head over to the releases page on Github https://github.com/madeofpendletonwool/PinePods/releases @@ -536,7 +543,7 @@ Once started you'll be able to sign in with your username and password. The serv ### Mac Client Install :computer: -Any of the client additions are super easy to get going. First head over to the releases page on Github +First head over to the releases page on Github https://github.com/madeofpendletonwool/PinePods/releases @@ -550,11 +557,23 @@ Once started you'll be able to sign in with your username and password. The serv ### Android Install :iphone: -For now, it's a manual install and there are some issues with the app. Check the releases page for the latest apk. + + Get it on IzzyOnDroid + + + + Get it on Obtainium + + +Currently there's options for direct downloads and Pinepods is on the IzzyOnDroid storefront! More locations coming soon! ### iOS Install :iphone: -Coming Soon - The web app works great for phones. + + Download on the App Store + + +The iOS app has arrived! Enjoy! ## PodPeople DB @@ -568,17 +587,11 @@ Finally, you can check out the Repo for it [here!](https://github.com/madeofpend ## Pinepods Firewood -A CLI only client that can be used to remotely share your podcasts to is in the works! Check out [Pinepods Firewood!](https://github.com/madeofpendletonwool/pinepods-firewood) +A CLI only client that can be used to remotely share your podcasts to has had it's first release! Now you can enjoy podcasts from the comfort of your terminal! Check out [Pinepods Firewood!](https://github.com/madeofpendletonwool/pinepods-firewood) ## Platform Availability -The Intention is for this app to become available on Windows, Linux, Mac, Android, and iOS. Windows, Linux, Mac, web, and android are all currently available and working. 
The android app is in a sort of beta currently as I finalize any remaining issues with it. Track those [here](https://github.com/madeofpendletonwool/PinePods/issues/320). This app is built with Tauri, therefore once the Android version is in a final state there's no reason I can't just compile it to iOS as well. - -For a podcast sync app I recommend Opodsync, but nextcloud sync works great too! This is only required if you use an app like AntennaPods. So then your Pinepods and Antennapods sync up podcasts. - -[OpodSync](https://github.com/kd2org/opodsync) - -[Nextcloud Podcast Sync App](https://apps.nextcloud.com/apps/gpoddersync) +The Intention is for this app to become available on Windows, Linux, Mac, Android, and iOS. Windows, Linux, Mac, web, and android are all currently available and working. ARM devices are also supported including raspberry pis. The app is shockingly performant on a raspberry pi as well. The only limitation is that a 64bit OS is required on an ARM device. Setup is exactly the same, just use the latest tag and docker will auto pull the ARM version. @@ -589,17 +602,12 @@ ARM devices are also supported including raspberry pis. The app is shockingly pe - [ ] Nix Package - [x] Aur Package - [x] Helm Chart and repo for kubernetes deployment -- [ ] Mobile Apps +- [x] Mobile Apps - [x] Android App - Beta - [ ] Android Auto support - - [ ] iOS App + - [x] iOS App - [ ] Packaging and automation -### Long term goals - -- [ ] Podcast ad blocking. Either by parsing audio blocks with ai and filtering ads or by utilizing a centralized server to allow others to send their ad block info to after determining the timestamps for ads. - - ## Screenshots :camera: Main Homepage with podcasts displayed @@ -668,4 +676,8 @@ Portions of the mobile app retain the original BSD license and attribution as re #### 💬 Acknowledgment -Huge thanks to Ben Hills for open-sourcing the Anytime Podcast Player. 
It served as a solid foundation and greatly accelerated development of PinePods. \ No newline at end of file +Huge thanks to Ben Hills for open-sourcing the Anytime Podcast Player. It served as a solid foundation and greatly accelerated development of PinePods. + +#### 🌐 Translation + +Translations are managed through [Weblate](https://hosted.weblate.org), a web-based translation tool that makes it easy for the community to contribute translations. If you'd like to help translate PinePods into your language, please visit our Weblate project and join the translation effort! diff --git a/clients/clientapi.py b/clients/clientapi.py deleted file mode 100644 index f84b5e11..00000000 --- a/clients/clientapi.py +++ /dev/null @@ -1,6924 +0,0 @@ -# Fast API -from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, Header, Body, Path, Form, Query, \ - security, BackgroundTasks, UploadFile -from fastapi.security import APIKeyHeader, HTTPBasic, HTTPBasicCredentials -from fastapi.responses import PlainTextResponse, JSONResponse, Response, FileResponse, StreamingResponse, RedirectResponse -from fastapi.middleware.cors import CORSMiddleware -from starlette.concurrency import run_in_threadpool -from threading import Lock -import smtplib -from email.mime.text import MIMEText -from email.mime.multipart import MIMEMultipart -from functools import lru_cache, wraps -from yt_dlp import YoutubeDL -import subprocess -import threading - -# Needed Modules -from passlib.context import CryptContext -import mysql.connector -from mysql.connector import pooling -from time import time -from mysql.connector.pooling import MySQLConnectionPool -from mysql.connector import Error -import psycopg -from psycopg_pool import ConnectionPool -from psycopg.rows import dict_row -from psycopg.errors import UniqueViolation, ForeignKeyViolation, OperationalError -import os -import xml.etree.ElementTree as ET -from fastapi.middleware.gzip import GZipMiddleware -from 
starlette.middleware.sessions import SessionMiddleware -from starlette.requests import Request -import secrets -from pydantic import BaseModel, Field, HttpUrl -from typing import Dict, List, Any, Optional, Generator, Tuple, Set, TypedDict, Callable -import json -import logging -import argparse -import sys -from pyotp import TOTP, random_base32 -import base64 -import traceback -import time -import httpx -import asyncio -import io -import qrcode -import qrcode.image.svg -from urllib.parse import urlparse, urlunparse -import datetime -import feedparser -import dateutil.parser -import re -import requests -from requests.auth import HTTPBasicAuth -from contextlib import contextmanager -import signal - -def sigterm_handler(_signo, _stack_frame): - # Perform cleanup here - print("Received SIGTERM. Shutting down...") - sys.exit(0) - -signal.signal(signal.SIGTERM, sigterm_handler) - -# Internal Modules -sys.path.append('/pinepods') - -import database_functions.functions -import database_functions.auth_functions -import database_functions.app_functions -import database_functions.import_progress -import database_functions.oidc_state_manager -import database_functions.valkey_client -import database_functions.youtube -import database_functions.tasks -from database_functions.gpodder_router import gpodder_router -from database_functions.db_client import create_database_connection, close_database_connection - -# # Use a try-except to handle potential import errors -# try: -# from database_functions.tasks import ( -# download_podcast_task, -# download_youtube_video_task, -# queue_podcast_downloads, -# task_manager, # Changed from download_manager to task_manager -# download_manager, # Keep this for backward compatibility -# get_all_active_tasks, # Add this new function -# debug_task -# ) -# CELERY_AVAILABLE = True -# print('celery tasks imported') -# except ImportError as e: -# print(f"Failed to import Celery tasks: {e}") -# CELERY_AVAILABLE = False -# # Define fallback functions if 
needed - - -database_type = str(os.getenv('DB_TYPE', 'mariadb')) -if database_type == "postgresql": - print(f"You've selected a postgresql database.") -else: - print("You've selected a mariadb database") - -secret_key_middle = secrets.token_hex(32) - -# Temporary storage for MFA secrets -temp_mfa_secrets = {} - -app = FastAPI() -security = HTTPBasic() -origins = [ - "http://localhost", - "http://localhost:8080", - "http://127.0.0.1:8080", - "http://127.0.0.1", - "*" -] - -app.include_router(gpodder_router) - -# app.add_middleware( -# CORSMiddleware, -# allow_origins=origins, -# allow_credentials=True, -# allow_methods=["*"], -# allow_headers=["*"], -# ) - -app.add_middleware(GZipMiddleware, minimum_size=1000) -app.add_middleware(SessionMiddleware, secret_key=secret_key_middle) - - -API_KEY_NAME = "pinepods_api" -api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False) - -pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto") - -# Proxy variables -proxy_host = os.environ.get("HOSTNAME", "localhost") -proxy_port = os.environ.get("PINEPODS_PORT", "8040") -proxy_protocol = os.environ.get("PROXY_PROTOCOL", "http") -reverse_proxy = os.environ.get("REVERSE_PROXY", "False") - -# Podcast Index API url -api_url = os.environ.get("SEARCH_API_URL", "https://search.pinepods.online/api/search") -people_url = os.environ.get("PEOPLE_API_URL", "https://people.pinepods.online") - -# Initial Vars needed to start and used throughout -if reverse_proxy == "True": - proxy_url = f'{proxy_protocol}://{proxy_host}/mover/?url=' -else: - proxy_url = f'{proxy_protocol}://{proxy_host}:{proxy_port}/mover/?url=' - -logger = logging.getLogger(__name__) - - -def get_database_connection(): - """FastAPI dependency for getting a database connection""" - try: - db = create_database_connection() - yield db - except HTTPException: - raise # Re-raise the HTTPException to let FastAPI handle it properly - except Exception as e: - logger.error(f"Database connection error of type 
{type(e).__name__} with arguments: {e.args}") - logger.error(traceback.format_exc()) - raise HTTPException(500, "Unable to connect to the database") - finally: - try: - close_database_connection(db) - except Exception as e: - logger.error(f"Error in connection cleanup: {str(e)}") - - -def get_api_keys(cnx): - logging.info("Executing get_api_keys function...") - if database_type == "postgresql": - # Use dict_row row factory for PostgreSQL - cnx.row_factory = dict_row - cursor = cnx.cursor() - query = 'SELECT * FROM "APIKeys"' - else: # Assuming MariaDB/MySQL if not PostgreSQL - cursor = cnx.cursor(dictionary=True) - query = "SELECT * FROM APIKeys" - - try: - cursor.execute(query) - rows = cursor.fetchall() - except Exception as e: - logging.error(f"Database error: {e}") - raise - logging.info(f"Retrieved API keys: {rows}") - - cursor.close() - return rows - - -def get_api_key(request: Request, api_key: str = Depends(api_key_header), - cnx: Generator = Depends(get_database_connection)): - if api_key is None: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="API key is missing") - - api_keys = get_api_keys(cnx) - - for api_key_entry in api_keys: - stored_key = api_key_entry.get("APIKey".lower(), None) - client_id = api_key_entry.get("APIKeyID".lower(), None) - - if api_key == stored_key: # Direct comparison instead of using Passlib - request.session["api_key"] = api_key # Store the API key - return client_id - - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key") - - -def get_api_key_from_header(api_key: str = Header(None, name="Api-Key")): - if not api_key: - raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated") - return api_key - -class Web_Key: - def __init__(self): - self.web_key = None - - def get_web_key(self, cnx): - self.web_key = database_functions.functions.get_web_key(cnx, database_type) - return self.web_key - -base_webkey = Web_Key() - -async def 
initialize_web_key(): - cnx = create_database_connection() - try: - base_webkey.get_web_key(cnx) - finally: - close_database_connection(cnx) - -def direct_database_connection(): - """Get a direct database connection - alias for create_database_connection""" - return create_database_connection() - -async def get_current_user(credentials: HTTPBasicCredentials = Depends(security)): - # Use credentials.username and credentials.password where needed - return credentials - - -# Modify check_if_admin to handle initialization -async def check_if_admin(api_key: str = Depends(get_api_key_from_header), cnx=Depends(get_database_connection)): - # Initialize web key if not already set - if base_webkey.web_key is None: - await initialize_web_key() - - # Debug logging - print(f"Checking admin access - API Key: {api_key}, Web Key: {base_webkey.web_key}") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - if is_web_key: - return True - - user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if not user_id: - raise HTTPException(status_code=403, detail="Invalid API key.") - - if user_id == 1: - return True - - is_admin = database_functions.functions.user_admin_check(cnx, database_type, user_id) - if not is_admin: - raise HTTPException(status_code=403, detail="User not authorized.") - - return True - - -def check_if_admin_inner(api_key: str, cnx): - user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if not user_id: - return False - return database_functions.functions.user_admin_check(cnx, database_type, user_id) - -async def has_elevated_access(api_key: str, cnx): - # Check if it's an admin - is_admin = await run_in_threadpool(check_if_admin_inner, api_key, cnx) - # Check if it's the web key - web_key = base_webkey.web_key - is_web_key = api_key == web_key - - return is_admin or is_web_key - -@app.get('/api/pinepods_check') -async def pinepods_check(): - return 
{"status_code": 200, "pinepods_instance": True} - - -@app.get('/api/data/verify_key') -async def verify_key(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - return {"status": "success"} - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - -@app.get('/api/data/get_user') -async def get_user(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - retrieved_id = database_functions.functions.get_api_user(cnx, database_type, api_key) - return {"status": "success", "retrieved_id": retrieved_id} - else: - raise HTTPException(status_code=403, - detail="Your api-key appears to be incorrect.") - -@app.get('/api/data/get_key') -async def get_key(cnx=Depends(get_database_connection), - credentials: HTTPBasicCredentials = Depends(get_current_user)): - is_password_valid = database_functions.auth_functions.verify_password(cnx, database_type, credentials.username.lower(), credentials.password) - if is_password_valid: - retrieved_key = database_functions.functions.get_api_key(cnx, database_type, credentials.username.lower()) - return {"status": "success", "retrieved_key": retrieved_key} - else: - raise HTTPException(status_code=403, - detail="Your credentials appear to be incorrect.") - -@app.get("/api/data/config") -async def api_config(api_key: str = Depends(get_api_key_from_header), cnx=Depends(get_database_connection)): - global api_url, proxy_url, proxy_host, proxy_port, proxy_protocol, reverse_proxy - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - return { - "api_url": api_url, - "proxy_url": proxy_url, - "proxy_host": proxy_host, - "proxy_port": 
proxy_port, - "proxy_protocol": proxy_protocol, - "reverse_proxy": reverse_proxy, - "people_url": people_url, - } - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - -@app.get("/api/data/guest_status", response_model=bool) -async def api_guest_status(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - result = database_functions.functions.guest_status(cnx, database_type) - return result - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - -@app.get("/api/data/download_status", response_model=bool) -async def api_download_status(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - result = database_functions.functions.download_status(cnx, database_type) - return result - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - -@app.get("/api/data/return_episodes/{user_id}") -async def api_return_episodes(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - episodes = 
database_functions.functions.return_episodes(database_type, cnx, user_id) - if episodes is None: - episodes = [] # Return an empty list instead of raising an exception - return {"episodes": episodes} - else: - raise HTTPException(status_code=403, - detail="You can only return episodes of your own!") - - -@app.get("/api/data/podcast_episodes") -async def api_podcast_episodes(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), user_id: int = Query(...), podcast_id: int = Query(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - episodes = database_functions.functions.return_podcast_episodes(database_type, cnx, user_id, podcast_id) - if episodes is None: - episodes = [] # Return an empty list instead of raising an exception - # logging.error(f"Episodes returned: {episodes}") - return {"episodes": episodes} - else: - raise HTTPException(status_code=403, - detail="You can only return episodes of your own!") - -@app.get("/api/data/home_overview") -async def api_home_overview( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...) 
-): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == user_id or is_web_key: - home_data = database_functions.functions.get_home_overview(database_type, cnx, user_id) - return home_data - else: - raise HTTPException( - status_code=403, - detail="You can only view your own home overview!" - ) - -@app.get("/api/data/startpage") -async def api_get_startpage( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == user_id or is_web_key: - startpage = database_functions.functions.get_user_startpage(cnx, database_type, user_id) - return {"StartPage": startpage} - else: - raise HTTPException( - status_code=403, - detail="You can only view your own StartPage setting!" - ) - -@app.post("/api/data/startpage") -async def api_set_startpage( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...), - startpage: str = Query(...) 
-): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == user_id or is_web_key: - success = database_functions.functions.set_user_startpage(cnx, database_type, user_id, startpage) - return {"success": success, "message": "StartPage updated successfully"} - else: - raise HTTPException( - status_code=403, - detail="You can only modify your own StartPage setting!" - ) - -@app.get("/api/data/youtube_episodes") -async def api_youtube_episodes(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), user_id: int = Query(...), podcast_id: int = Query(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - episodes = database_functions.functions.return_youtube_episodes(database_type, cnx, user_id, podcast_id) - if episodes is None: - episodes = [] # Return an empty list instead of raising an exception - return {"episodes": episodes} - else: - raise HTTPException(status_code=403, - detail="You can only return episodes of your own!") - - -@app.get("/api/data/get_episode_id_ep_name") -async def api_episode_id(cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...), episode_title: str = 
Query(...), episode_url: str = Query(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - print(episode_title) - print(episode_url) - ep_id = database_functions.functions.get_episode_id_ep_name(cnx, database_type, episode_title, episode_url) - print(f"Episode ID: {ep_id}") - return ep_id - else: - raise HTTPException(status_code=403, - detail="You can only return pocast ids of your own podcasts!") - - -@app.get("/api/data/get_podcast_id") -async def api_podcast_id(cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...), podcast_feed: str = Query(...), podcast_title: str = Query(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - episodes = database_functions.functions.get_podcast_id(database_type, cnx, user_id, podcast_feed, podcast_title) - if episodes is None: - episodes = [] # Return an empty list instead of raising an exception - return {"episodes": episodes} - else: - raise HTTPException(status_code=403, - detail="You can only return 
pocast ids of your own podcasts!") - -@app.get("/api/data/get_podcast_id_from_ep_id") -async def api_get_podcast_id( - episode_id: int, - user_id: int, - is_youtube: bool = False, # Add optional parameter - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - logging.info('Fetching API key') - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - logging.info('Getting key ID') - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - logging.info(f'Got key ID: {key_id}') - - if key_id == user_id or is_web_key: - podcast_id = database_functions.functions.get_podcast_id_from_episode( - cnx, database_type, episode_id, user_id, is_youtube - ) - if podcast_id is None: - raise HTTPException(status_code=404, detail="Podcast ID not found for the given episode ID") - return {"podcast_id": podcast_id} - else: - raise HTTPException(status_code=403, detail="You can only get podcast ID for your own episodes.") - - -@app.get("/api/data/get_podcast_id_from_ep_name") -async def api_get_podcast_id_name(episode_name: str, episode_url: str, user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - logging.info('Fetching API key') - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - logging.info('Getting key ID') - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - logging.info(f'Got key ID: {key_id}') - - # Allow the action if the API key belongs 
to the user or it's the web API key - if key_id == user_id or is_web_key: - podcast_id = database_functions.functions.get_podcast_id_from_episode_name(cnx, database_type, episode_name, episode_url, user_id) - if podcast_id is None: - raise HTTPException(status_code=404, detail="Podcast ID not found for the given episode name and URL") - return {"podcast_id": podcast_id} - else: - raise HTTPException(status_code=403, detail="You can only get podcast ID for your own episodes.") - - -@app.get("/api/data/get_podcast_details") -async def api_podcast_details(podcast_id: str = Query(...), cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - user_id: int = Query(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - print('in pod details') - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - details = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id) - print(f'got details {details}') - if details is None: - episodes = [] # Return an empty list instead of raising an exception - return {"details": details} - else: - raise HTTPException(status_code=403, - detail="You can only return pocast ids of your own podcasts!") - -class ClickedFeedURL(BaseModel): - podcastid: int - podcastname: str - feedurl: str - description: str - author: str - artworkurl: str - explicit: bool - episodecount: int - categories: Optional[Dict[str, str]] - websiteurl: str - podcastindexid: int - is_youtube: Optional[bool] - -@app.get("/api/data/get_podcast_details_dynamic", 
response_model=ClickedFeedURL) -async def get_podcast_details( - user_id: int, - podcast_title: str, - podcast_url: str, - podcast_index_id: int, - added: bool, - display_only: bool = False, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions") - if added: - podcast_id = database_functions.functions.get_podcast_id(database_type, cnx, user_id, podcast_url, podcast_title) - details = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id) - if details is None: - raise HTTPException(status_code=404, detail="Podcast not found") - - # Handle categories field with existence check - categories = details.get("categories") if database_type != "postgresql" else details.get("categories") - if not categories: - categories_dict = {} - elif categories.startswith('{'): - try: - categories = categories.replace("'", '"') - categories_dict = json.loads(categories) - except json.JSONDecodeError as e: - print(f"JSON decode error: {e}") - raise HTTPException(status_code=500, detail="Internal server error") - else: - categories_dict = {str(i): cat.strip() for i, cat in enumerate(categories.split(','))} - - - pod_details = ClickedFeedURL( - podcastid=0, - podcastname=details["podcastname"], - feedurl=details["feedurl"], - description=details["description"], - author=details["author"], - artworkurl=details["artworkurl"], - explicit=details["explicit"], - episodecount=details["episodecount"], - categories=categories_dict, - websiteurl=details["websiteurl"], - podcastindexid=details["podcastindexid"], - is_youtube=details["isyoutubechannel"] - ) - return pod_details - else: - podcast_values = database_functions.app_functions.get_podcast_values(podcast_url, user_id, None, None, display_only) - categories = 
podcast_values['categories'] - print(f"heres the ep count: {podcast_values['pod_episode_count']}") - - if categories.startswith('{'): - try: - # Replace single quotes with double quotes - categories = categories.replace("'", '"') - categories_dict = json.loads(categories) - except json.JSONDecodeError as e: - print(f"JSON decode error: {e}") - raise HTTPException(status_code=500, detail="Internal server error") - else: - categories_dict = {str(i): cat.strip() for i, cat in enumerate(categories.split(','))} - - - return ClickedFeedURL( - podcastid=0, - podcastname=podcast_values['pod_title'], - feedurl=podcast_values['pod_feed_url'], - description=podcast_values['pod_description'], - author=podcast_values['pod_author'], - artworkurl=podcast_values['pod_artwork'], - explicit=podcast_values['pod_explicit'], - episodecount=podcast_values['pod_episode_count'], - categories=categories_dict, - websiteurl=podcast_values['pod_website'], - podcastindexid=podcast_index_id, - is_youtube=False - ) - -class ImportProgressResponse(BaseModel): - current: int - current_podcast: str - total: int - -@app.get("/api/data/import_progress/{user_id}") -async def get_import_progress( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == user_id or is_web_key: - # Fetch the import progress from the database - current, total, current_podcast = database_functions.import_progress.import_progress_manager.get_progress(user_id) - return ImportProgressResponse(current=current, total=total, current_podcast=current_podcast) - else: - raise HTTPException(status_code=403, detail="You can only fetch import progress for 
yourself!") - -class OPMLImportRequest(BaseModel): - podcasts: List[str] - user_id: int - -@app.post("/api/data/import_opml") -async def api_import_opml( - import_request: OPMLImportRequest, - background_tasks: BackgroundTasks, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == import_request.user_id or is_web_key: - # Start the import process in the background - background_tasks.add_task(process_opml_import, import_request, database_type) - return {"success": True, "message": "Import process started"} - else: - raise HTTPException(status_code=403, detail="You can only import podcasts for yourself!") - - -@contextmanager -def get_db_connection(): - connection = None - try: - connection = create_database_connection() - yield connection - finally: - if connection: - close_database_connection(connection) - -def process_opml_import(import_request: OPMLImportRequest, database_type): - total_podcasts = len(import_request.podcasts) - database_functions.import_progress.import_progress_manager.start_import(import_request.user_id, total_podcasts) - for index, podcast_url in enumerate(import_request.podcasts, start=1): - try: - with get_db_connection() as cnx: - podcast_values = database_functions.app_functions.get_podcast_values(podcast_url, import_request.user_id, None, None, False) - database_functions.functions.add_podcast(cnx, database_type, podcast_values, import_request.user_id, 30) - database_functions.import_progress.import_progress_manager.update_progress(import_request.user_id, index, podcast_url) - except Exception as e: - print(f"Error importing podcast {podcast_url}: {str(e)}") - # Add a small 
delay to allow other requests to be processed - time.sleep(0.1) - database_functions.import_progress.import_progress_manager.clear_progress(import_request.user_id) - -class PodcastFeedData(BaseModel): - podcast_feed: str - -@app.get("/api/data/fetch_podcast_feed") -async def fetch_podcast_feed(podcast_feed: str = Query(...), cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions") - - # Define headers that mimic a standard web browser - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36", - "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8", - "Accept-Language": "en-US,en;q=0.5", - "Connection": "keep-alive", - "Upgrade-Insecure-Requests": "1", - "Cache-Control": "max-age=0" - } - - # Fetch the podcast feed data using httpx with browser-like headers - try: - async with httpx.AsyncClient(follow_redirects=True, timeout=30.0) as client: - response = await client.get(podcast_feed, headers=headers) - response.raise_for_status() # Will raise an httpx.HTTPStatusError for 4XX/5XX responses - return Response(content=response.content, media_type="application/xml") - except httpx.HTTPStatusError as e: - # Add more detailed error logging - error_message = f"HTTP error fetching podcast feed: {str(e)}" - logging.error(error_message) - raise HTTPException(status_code=e.response.status_code, - detail=f"Failed to fetch podcast feed: {e.response.reason_phrase}") - except httpx.RequestError as e: - # Handle request errors (network issues, etc.) 
- error_message = f"Request error fetching podcast feed: {str(e)}" - logging.error(error_message) - raise HTTPException(status_code=500, detail="Failed to fetch podcast feed due to network or connection issues") - except Exception as e: - # Catch-all for other unexpected errors - error_message = f"Unexpected error fetching podcast feed: {str(e)}" - logging.error(error_message) - raise HTTPException(status_code=500, detail="Unexpected error occurred while fetching the podcast feed") - -NAMESPACE = {'podcast': 'https://podcastindex.org/namespace/1.0'} - -async def fetch_feed(feed_url: str) -> str: - async with httpx.AsyncClient(follow_redirects=True) as client: - response = await client.get(feed_url) - response.raise_for_status() - return response.text - -async def fetch_json(url: str) -> Optional[dict]: - async with httpx.AsyncClient(follow_redirects=True) as client: - response = await client.get(url) - response.raise_for_status() - return response.json() - -def parse_chapters(feed_content: str, audio_url: str) -> List[Dict[str, Optional[str]]]: - chapters = [] - try: - root = ET.fromstring(feed_content) - episodes = root.findall('.//item') - for episode in episodes: - enclosure_element = episode.find('enclosure') - enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None - if enclosure_element is not None and enclosure_url == audio_url: - chapters_element = episode.find('podcast:chapters', NAMESPACE) - if chapters_element is not None: - chapters_url = chapters_element.attrib.get('url') - if chapters_url: - return chapters_url # Return the chapters URL to fetch the JSON - else: - print(f"Chapter element with missing URL: {ET.tostring(chapters_element, encoding='unicode')}") - break # Exit loop once the matching episode is found - except ET.ParseError as e: - print(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return chapters - -def parse_transcripts(feed_content: 
str, audio_url: str) -> List[Dict[str, Optional[str]]]: - transcripts = [] - try: - root = ET.fromstring(feed_content) - episodes = root.findall('.//item') - for episode in episodes: - enclosure_element = episode.find('enclosure') - enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None - if enclosure_element is not None and enclosure_url == audio_url: - transcript_elements = episode.findall('podcast:transcript', NAMESPACE) - for transcript_element in transcript_elements: - transcript_url = transcript_element.attrib.get('url') - transcript_type = transcript_element.attrib.get('type') - transcript_language = transcript_element.attrib.get('language') - transcript_rel = transcript_element.attrib.get('rel') - transcripts.append({ - "url": transcript_url, - "mime_type": transcript_type, - "language": transcript_language, - "rel": transcript_rel - }) - break # Exit loop once the matching episode is found - except ET.ParseError as e: - print(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return transcripts - - -class TTLCache: - def __init__(self, maxsize: int = 1000, ttl: int = 3600): - self.maxsize = maxsize - self.ttl = ttl - self.cache: Dict[Tuple, Tuple[Any, float]] = {} - - async def get_or_set(self, key: Tuple, callback: Callable): - current_time = time.time() - - # Check if key exists and hasn't expired - if key in self.cache: - result, timestamp = self.cache[key] - if current_time - timestamp < self.ttl: - return result - - # If we get here, either key doesn't exist or has expired - try: - # Await the callback here - result = await callback() - - # Store new result - self.cache[key] = (result, current_time) - - # Enforce maxsize by removing oldest entries - if len(self.cache) > self.maxsize: - oldest_key = min(self.cache.keys(), key=lambda k: self.cache[k][1]) - del self.cache[oldest_key] - - return result - except Exception as e: - logging.error(f"Error in 
cache callback: {e}") - raise - -def async_ttl_cache(maxsize: int = 1000, ttl: int = 3600): - cache = TTLCache(maxsize=maxsize, ttl=ttl) - - def decorator(func): - @wraps(func) - async def wrapper(*args, **kwargs): - # Create a cache key from the function arguments - key = (func.__name__, args, frozenset(kwargs.items())) - - try: - # Create an async callback - async def callback(): - return await func(*args, **kwargs) - - return await cache.get_or_set(key, callback) - except Exception as e: - logging.error(f"Error in cached function {func.__name__}: {e}") - # Fall back to calling the function directly - return await func(*args, **kwargs) - - return wrapper - return decorator - -@async_ttl_cache(maxsize=1000, ttl=3600) -async def get_podpeople_hosts(podcast_index_id: int) -> List[Dict[str, Optional[str]]]: - try: - async with httpx.AsyncClient(timeout=5.0) as client: - url = f"{people_url}/api/hosts/{podcast_index_id}" - response = await client.get(url) - response.raise_for_status() - hosts_data = response.json() - - if hosts_data: - return [{ - "name": host.get("name"), - "role": host.get("role", "Host"), - "group": None, - "img": host.get("img"), - "href": host.get("link"), - "description": host.get("description") - } for host in hosts_data] - except Exception as e: - logging.error(f"Error fetching hosts: {e}") - - return [] - -async def parse_people(feed_content: str, audio_url: Optional[str] = None, podcast_index_id: Optional[int] = None) -> List[Dict[str, Optional[str]]]: - people = [] - try: - root = ET.fromstring(feed_content) - if audio_url: - # Look for episode-specific people - episodes = root.findall('.//item') - for episode in episodes: - enclosure_element = episode.find('enclosure') - enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None - if enclosure_element is not None and enclosure_url == audio_url: - person_elements = episode.findall('podcast:person', NAMESPACE) - if person_elements: - for person_element in 
person_elements: - people.append({ - "name": person_element.text, - "role": person_element.attrib.get('role'), - "group": person_element.attrib.get('group'), - "img": person_element.attrib.get('img'), - "href": person_element.attrib.get('href'), - }) - break - - if not people: - # Fall back to channel-wide people - person_elements = root.findall('.//channel/podcast:person', NAMESPACE) - for person_element in person_elements: - people.append({ - "name": person_element.text, - "role": person_element.attrib.get('role'), - "group": person_element.attrib.get('group'), - "img": person_element.attrib.get('img'), - "href": person_element.attrib.get('href'), - }) - except ET.ParseError as e: - logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") - - # If no people found in the feed, fall back to podpeople_db - if not people and podcast_index_id: - # Use the async version - people = await get_podpeople_hosts(podcast_index_id) - - return people - -@app.get("/api/data/fetch_podcasting_2_data") -async def fetch_podcasting_2_data( - episode_id: int, - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions") - - try: - # Get all the metadata - episode_metadata = database_functions.functions.get_episode_metadata(database_type, cnx, episode_id, user_id) - podcast_id = database_functions.functions.get_podcast_id_from_episode(cnx, database_type, episode_id, user_id) - podcast_feed = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id) - - episode_url = episode_metadata['episodeurl'] - podcast_feed_url = podcast_feed['feedurl'] - podcast_index_id = database_functions.functions.get_podcast_index_id(cnx, database_type, podcast_id) - - # Set up common request parameters - headers 
= { - 'User-Agent': 'PinePods/1.0', - 'Accept': 'application/xml, application/rss+xml, text/xml, application/json' - } - - # Check if podcast requires authentication - auth = None - if podcast_feed.get('username') and podcast_feed.get('password'): - auth = httpx.BasicAuth( - username=podcast_feed['username'], - password=podcast_feed['password'] - ) - - # Fetch feed content with authentication if needed - async with httpx.AsyncClient(timeout=10.0, follow_redirects=True) as client: - try: - response = await client.get( - podcast_feed_url, - headers=headers, - auth=auth - ) - response.raise_for_status() - feed_content = response.text - except httpx.HTTPStatusError as e: - if e.response.status_code == 401: - logging.error(f"Authentication failed for podcast feed: {podcast_feed_url}") - raise HTTPException( - status_code=401, - detail="Authentication required or invalid credentials for podcast feed" - ) - raise - - # Parse feed content - chapters_url = parse_chapters(feed_content, episode_url) - transcripts = parse_transcripts(feed_content, episode_url) - people = await parse_people(feed_content, episode_url, podcast_index_id) - - # Get chapters if available - chapters_data = [] - if chapters_url: - try: - async with httpx.AsyncClient(timeout=5.0, follow_redirects=True) as client: - # Use same auth for chapters if it's from the same domain - chapters_auth = auth if chapters_url.startswith(podcast_feed_url) else None - chapters_headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', - 'Accept': 'application/json, text/javascript, */*; q=0.01', - 'Accept-Language': 'en-US,en;q=0.9', - 'Referer': podcast_feed_url - } - response = await client.get( - chapters_url, - headers=chapters_headers, - auth=chapters_auth - ) - response.raise_for_status() - chapters_data = response.json().get('chapters', []) - except Exception as e: - logging.error(f"Error fetching chapters: {e}") - # Continue with 
empty chapters rather than failing completely - - return { - "chapters": chapters_data, - "transcripts": transcripts, - "people": people - } - - except httpx.HTTPStatusError as e: - logging.error(f"HTTP error in fetch_podcasting_2_data: {e}") - raise HTTPException( - status_code=e.response.status_code, - detail=f"Error fetching podcast data: {str(e)}" - ) - except httpx.RequestError as e: - logging.error(f"Request error in fetch_podcasting_2_data: {e}") - raise HTTPException( - status_code=500, - detail=f"Failed to fetch podcast data: {str(e)}" - ) - except Exception as e: - logging.error(f"Error in fetch_podcasting_2_data: {e}") - # Return partial data if we have it - if any(var in locals() for var in ['chapters_data', 'transcripts', 'people']): - return { - "chapters": locals().get('chapters_data', []), - "transcripts": locals().get('transcripts', []), - "people": locals().get('people', []) - } - raise HTTPException(status_code=500, detail=str(e)) - -def is_valid_image_url(url: str) -> bool: - """Validate image URL for security""" - parsed = urlparse(url) - # Check if URL is absolute and uses http(s) - if not parsed.scheme or parsed.scheme not in ('http', 'https'): - return False - return True - -@app.get("/api/proxy/image") -async def proxy_image( - url: str = Query(..., description="URL of the image to proxy") -): - logging.info(f"Image proxy request received for URL: {url}") - - if not is_valid_image_url(url): - logging.error(f"Invalid image URL: {url}") - raise HTTPException(status_code=400, detail="Invalid image URL") - - try: - async with httpx.AsyncClient(follow_redirects=True) as client: - logging.info(f"Fetching image from: {url}") - response = await client.get(url, timeout=10.0) - logging.info(f"Image fetch response status: {response.status_code}") - logging.info(f"Response headers: {response.headers}") - - response.raise_for_status() - - content_type = response.headers.get("Content-Type", "") - logging.info(f"Content type: {content_type}") - - if not 
content_type.startswith(("image/", "application/octet-stream")): - logging.error(f"Invalid content type: {content_type}") - raise HTTPException(status_code=400, detail="URL does not point to an image") - - headers = { - "Content-Type": content_type, - "Cache-Control": "public, max-age=86400", - "Access-Control-Allow-Origin": "*", - "X-Content-Type-Options": "nosniff" - } - logging.info("Returning image response") - - return StreamingResponse( - response.aiter_bytes(), - headers=headers, - media_type=content_type - ) - except Exception as e: - logging.error(f"Error in image proxy: {str(e)}") - raise HTTPException(status_code=500, detail=str(e)) - - -def parse_podroll(feed_content: str) -> List[Dict[str, Optional[str]]]: - podroll = [] - try: - root = ET.fromstring(feed_content) - podroll_element = root.find('.//channel/podcast:podroll', NAMESPACE) - if podroll_element is not None: - for remote_item in podroll_element.findall('podcast:remoteItem', NAMESPACE): - podroll.append({ - "feed_guid": remote_item.attrib.get('feedGuid') - }) - except ET.ParseError as e: - logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return podroll - -def parse_funding(feed_content: str) -> List[Dict[str, Optional[str]]]: - funding = [] - try: - root = ET.fromstring(feed_content) - funding_elements = root.findall('.//channel/podcast:funding', NAMESPACE) - for funding_element in funding_elements: - funding.append({ - "url": funding_element.attrib.get('url'), - "description": funding_element.text - }) - except ET.ParseError as e: - logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return funding - -def parse_value(feed_content: str) -> List[Dict[str, Optional[str]]]: - value = [] - try: - root = ET.fromstring(feed_content) - value_elements = root.findall('.//channel/podcast:value', NAMESPACE) - for value_element in value_elements: - 
value_recipients = [] - for recipient in value_element.findall('podcast:valueRecipient', NAMESPACE): - value_recipients.append({ - "name": recipient.attrib.get('name'), - "type": recipient.attrib.get('type'), - "address": recipient.attrib.get('address'), - "split": recipient.attrib.get('split') - }) - value.append({ - "type": value_element.attrib.get('type'), - "method": value_element.attrib.get('method'), - "suggested": value_element.attrib.get('suggested'), - "recipients": value_recipients - }) - except ET.ParseError as e: - logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return value - -def parse_hosts(feed_content: str) -> List[Dict[str, Optional[str]]]: - people = [] - try: - root = ET.fromstring(feed_content) - person_elements = root.findall('.//channel/podcast:person', NAMESPACE) - for person_element in person_elements: - role = person_element.attrib.get('role', 'host').lower() - if role == 'host': - people.append({ - "name": person_element.text, - "role": role, - "group": person_element.attrib.get('group'), - "img": person_element.attrib.get('img'), - "href": person_element.attrib.get('href') - }) - except ET.ParseError as e: - logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content - return people - -async def get_podcast_hosts(cnx, database_type, podcast_id, feed_content, podcast_index_id): - # First, try to parse hosts from the feed content - hosts = parse_hosts(feed_content) - - # If no hosts found, try podpeople_db - if not hosts: - if podcast_index_id: - hosts = await get_podpeople_hosts(podcast_index_id) - - # If still no hosts found, return a default host - if not hosts: - hosts = [{ - "name": "Unknown Host", - "role": "Host", - "description": "No host information available.", - "img": None, - "href": None - }] - - return hosts - -@app.get("/api/data/fetch_podcasting_2_pod_data") -async def 
fetch_podcasting_2_pod_data(podcast_id: int, user_id: int, cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions") - - # Fetch the podcast details including auth credentials - podcast_feed = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id) - podcast_feed_url = podcast_feed['feedurl'] - - # Set up HTTP client with authentication if credentials exist - async with httpx.AsyncClient(follow_redirects=True) as client: - headers = { - 'User-Agent': 'PinePods/1.0', - 'Accept': 'application/xml, application/rss+xml, text/xml' - } - - # Check if podcast requires authentication - auth = None - if podcast_feed.get('username') and podcast_feed.get('password'): - auth = httpx.BasicAuth( - username=podcast_feed['username'], - password=podcast_feed['password'] - ) - - try: - response = await client.get( - podcast_feed_url, - headers=headers, - auth=auth, - timeout=30.0 # Add reasonable timeout - ) - response.raise_for_status() - feed_content = response.text - - logging.info(f"Successfully fetched feed content from {podcast_feed_url}") - - # Parse the feed content for various metadata - people = await get_podcast_hosts(cnx, database_type, podcast_id, feed_content, podcast_feed['podcastindexid']) - podroll = parse_podroll(feed_content) - funding = parse_funding(feed_content) - value = parse_value(feed_content) - - logging.debug(f"Parsed metadata - People: {len(people) if people else 0} entries") - - return { - "people": people, - "podroll": podroll, - "funding": funding, - "value": value - } - - except httpx.HTTPStatusError as e: - if e.response.status_code == 401: - logging.error(f"Authentication failed for podcast feed: {podcast_feed_url}") - raise HTTPException( - status_code=401, - detail="Authentication 
required or invalid credentials for podcast feed" - ) - raise HTTPException( - status_code=e.response.status_code, - detail=f"Error fetching podcast feed: {str(e)}" - ) - except httpx.RequestError as e: - logging.error(f"Request error fetching podcast feed: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"Failed to fetch podcast feed: {str(e)}" - ) - except Exception as e: - logging.error(f"Unexpected error processing podcast feed: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"Error processing podcast feed: {str(e)}" - ) - - -class PodcastResponse(BaseModel): - podcastid: int - podcastname: str - feedurl: str - -class PodPeopleResponse(BaseModel): - success: bool - podcasts: List[PodcastResponse] - -@app.get("/api/data/podpeople/host_podcasts") -async def get_host_podcasts( - hostname: str, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """ - Get podcasts associated with a host from the podpeople database. 
- """ - # Verify API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions") - - try: - # Make request to podpeople database - async with httpx.AsyncClient(follow_redirects=True) as client: - logging.info(f"Making request to {people_url}/api/hostsearch?name={hostname}") - response = await client.get( - f"{people_url}/api/hostsearch", # Changed this line to match working endpoint - params={"name": hostname} - ) - response.raise_for_status() - podpeople_data = response.json() - - logging.info(f"Received response from podpeople: {podpeople_data}") - - # Transform the podpeople response into our expected format - podcasts = [] - if podpeople_data.get("success") and podpeople_data.get("podcasts"): - for podcast in podpeople_data["podcasts"]: - podcasts.append({ - 'podcastid': podcast['id'], - 'podcastname': podcast['title'], - 'feedurl': podcast['feed_url'] - }) - - logging.info(f"Transformed response: {podcasts}") - - return PodPeopleResponse( - success=True, - podcasts=podcasts - ) - - except httpx.HTTPStatusError as e: - logging.error(f"HTTP error from podpeople: {str(e)}") - raise HTTPException( - status_code=e.response.status_code, - detail=f"Error from podpeople service: {str(e)}" - ) - except httpx.RequestError as e: - logging.error(f"Error connecting to podpeople: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"Error connecting to podpeople service: {str(e)}" - ) - except Exception as e: - logging.error(f"Unexpected error: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"Unexpected error: {str(e)}" - ) - -@app.post("/api/data/check_episode_playback") -async def api_check_episode_playback( - user_id: int = Form(...), - episode_title: Optional[str] = Form(None), - episode_url: Optional[str] = Form(None), - cnx=Depends(get_database_connection), - api_key: str = 
Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - logging.info(f"Received: user_id={user_id}, episode_title={episode_title}, episode_url={episode_url}") - - has_playback, listen_duration = database_functions.functions.check_episode_playback( - cnx, database_type, user_id, episode_title, episode_url - ) - if has_playback: - logging.info("Playback found, listen_duration={}".format(listen_duration)) - return {"has_playback": True, "listen_duration": listen_duration} - else: - logging.info("No playback found") - return {"has_playback": False, "listen_duration": 0} - else: - raise HTTPException(status_code=403, - detail="You can only check playback for yourself!") - - -@app.get("/api/data/user_details_id/{user_id}") -async def api_get_user_details_id(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - 
result = database_functions.functions.get_user_details_id(cnx, database_type, user_id) - if result: - return result - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.get("/api/data/get_theme/{user_id}") -async def api_get_theme(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - theme = database_functions.functions.get_theme(cnx, database_type, user_id) - return {"theme": theme} - else: - raise HTTPException(status_code=403, - detail="You can only get themes for yourself!") - - -class PodcastValuesModel(BaseModel): - pod_title: str - pod_artwork: str - pod_author: str - categories: dict - pod_description: str - pod_episode_count: int - pod_feed_url: str - pod_website: str - pod_explicit: bool - user_id: int - -class AddPodcastRequest(BaseModel): - podcast_values: PodcastValuesModel - podcast_index_id: int = Field(default=0) - -@app.post("/api/data/add_podcast") -async def api_add_podcast( - request: AddPodcastRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = 
database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == request.podcast_values.user_id or is_web_key: - if database_functions.functions.check_gpodder_settings(database_type, cnx, request.podcast_values.user_id): - gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, request.podcast_values.user_id) - gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, request.podcast_values.user_id) - - if gpod_type == "gpodder": - default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, request.podcast_values.user_id) - device_name = default_device["name"] if default_device else f"pinepods-internal-{request.podcast_values.user_id}" - - if gpod_type == "nextcloud": - database_functions.functions.add_podcast_to_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, request.podcast_values.pod_feed_url) - else: - database_functions.functions.add_podcast_to_opodsync(cnx, database_type, request.podcast_values.user_id, gpodder_url, gpodder_login, gpodder_token, request.podcast_values.pod_feed_url, device_name) - - result = database_functions.functions.add_podcast( - cnx, - database_type, - request.podcast_values.dict(), - request.podcast_values.user_id, - 30, - podcast_index_id=request.podcast_index_id - ) - - if isinstance(result, tuple): - podcast_id, first_episode_id = result - else: - podcast_id = result - first_episode_id = None # Or fetch it if needed - - if podcast_id: - return {"success": True, "podcast_id": podcast_id, "first_episode_id": first_episode_id} - else: - raise HTTPException(status_code=403, - detail="You can only add podcasts for yourself!") - -@app.post("/api/data/enable_disable_guest") -async def api_enable_disable_guest(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)): - database_functions.functions.enable_disable_guest(cnx, database_type) - return {"success": 
True} - - -@app.post("/api/data/enable_disable_downloads") -async def api_enable_disable_downloads(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)): - database_functions.functions.enable_disable_downloads(cnx, database_type) - return {"success": True} - - -@app.post("/api/data/enable_disable_self_service") -async def api_enable_disable_self_service(is_admin: bool = Depends(check_if_admin), - cnx=Depends(get_database_connection)): - database_functions.functions.enable_disable_self_service(cnx, database_type) - return {"success": True} - - -@app.get("/api/data/self_service_status") -async def api_self_service_status(cnx=Depends(get_database_connection)): - status = database_functions.functions.self_service_status(cnx, database_type) - # Return status directly without wrapping it in another dict - return status # Instead of {"status": status} - -class FirstAdminRequest(BaseModel): - username: str - password: str - email: str - fullname: str - - - -@app.post("/api/data/create_first") -async def create_first_admin( - request: FirstAdminRequest, - background_tasks: BackgroundTasks, - cnx=Depends(get_database_connection) -): - if database_functions.functions.check_admin_exists(cnx, database_type): - raise HTTPException( - status_code=403, - detail="An admin user already exists" - ) - try: - user_id = database_functions.functions.add_admin_user( - cnx, - database_type, - (request.fullname, request.username.lower(), request.email, request.password) - ) - - background_tasks.add_task(run_startup_tasks_background) - return {"message": "Admin user created successfully", "user_id": user_id} - except Exception as e: - raise HTTPException( - status_code=500, - detail=str(e) - ) - -def run_startup_tasks_background(): - cnx = create_database_connection() - try: - with open("/tmp/web_api_key.txt", "r") as f: - web_key = f.read().strip() - init_request = InitRequest(api_key=web_key) - # Execute startup tasks directly instead of calling the endpoint - 
is_valid = database_functions.functions.verify_api_key(cnx, database_type, web_key) - is_web_key = web_key == base_webkey.web_key - if not is_valid or not is_web_key: - raise Exception("Invalid web key") - database_functions.functions.add_news_feed_if_not_added(database_type, cnx) - except Exception as e: - logger.error(f"Background startup tasks failed: {e}") - finally: - close_database_connection(cnx) - -@app.put("/api/data/increment_listen_time/{user_id}") -async def api_increment_listen_time(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user, or it's the web API key - if key_id == user_id or is_web_key: - database_functions.functions.increment_listen_time(cnx, database_type, user_id) - return {"detail": "Listen time incremented."} - else: - raise HTTPException(status_code=403, - detail="You can only increment your own listen time.") - - -@app.put("/api/data/increment_played/{user_id}") -async def api_increment_played(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the 
action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - database_functions.functions.increment_played(cnx, database_type, user_id) - return {"detail": "Played count incremented."} - else: - raise HTTPException(status_code=403, - detail="You can only increment your own play count.") - - -class RecordHistoryData(BaseModel): - episode_id: int - user_id: int - episode_pos: float - is_youtube: bool = False # Default to False for backward compatibility - -@app.post("/api/data/record_podcast_history") -async def api_record_podcast_history(data: RecordHistoryData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - database_functions.functions.record_podcast_history( - cnx, - database_type, - data.episode_id, - data.user_id, - data.episode_pos, - data.is_youtube - ) - return {"detail": "History recorded successfully."} - else: - raise HTTPException(status_code=403, - detail="You can only record history for yourself!") - - -class GetEpisodeIdRequest(BaseModel): - podcast_id: int - user_id: int - is_youtube: bool = False # Add default False - - -@app.post("/api/data/get_episode_id") -async def api_get_episode_id(data: GetEpisodeIdRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - episode_id = 
database_functions.functions.get_first_episode_id( - cnx, - database_type, - data.podcast_id, - data.user_id, - data.is_youtube - ) - - if episode_id is None: - raise HTTPException(status_code=404, detail="No episodes found for this podcast.") - return {"episode_id": episode_id} - - - -class DownloadPodcastData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False # Default to False for backward compatibility - -@app.post("/api/data/download_podcast") -async def api_download_podcast( - data: DownloadPodcastData, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """ - Queue a single episode or YouTube video for download. - This uses the Celery task queue to handle the download asynchronously. - """ - # Validate API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - # Check permissions - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id != data.user_id and not is_web_key: - raise HTTPException( - status_code=403, - detail="You can only download content for yourself!" 
- ) - try: - # Check if already downloaded - is_downloaded = database_functions.functions.check_downloaded( - cnx, - database_type, - data.user_id, - data.episode_id, - data.is_youtube - ) - if is_downloaded: - return {"detail": "Content already downloaded."} - # Queue the appropriate download task - if data.is_youtube: - task = database_functions.tasks.download_youtube_video_task.delay(data.episode_id, data.user_id, database_type) - content_type = "YouTube video" - else: - task = database_functions.tasks.download_podcast_task.delay(data.episode_id, data.user_id, database_type) - content_type = "Podcast episode" - return { - "detail": f"{content_type} download has been queued and will process in the background.", - "task_id": task.id - } - except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Error queueing download: {str(e)}" - ) - -def download_content_fun(episode_id: int, user_id: int, is_youtube: bool): - cnx = create_database_connection() - try: - if is_youtube: - database_functions.functions.download_youtube_video(cnx, database_type, episode_id, user_id) - else: - database_functions.functions.download_podcast(cnx, database_type, episode_id, user_id) - finally: - cnx.close() - - -class DownloadAllPodcastData(BaseModel): - podcast_id: int - user_id: int - is_youtube: bool = False - -# Updated API endpoint using Celery for mass downloads -@app.post("/api/data/download_all_podcast") -async def api_download_all_podcast( - data: DownloadAllPodcastData, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """ - Queue all episodes of a podcast or videos of a YouTube channel for download. - Uses a Celery task queue to process downloads in the background without blocking the server. 
- """ - # Validate API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check permissions - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id != data.user_id and not is_web_key: - raise HTTPException( - status_code=403, - detail="You can only download content for yourself!" - ) - - try: - # Verify the podcast/channel exists - if data.is_youtube: - # Check if channel exists - videos = database_functions.functions.get_video_ids_for_podcast( - cnx, database_type, data.podcast_id - ) - if not videos: - return {"detail": "No videos found for the given YouTube channel."} - else: - # Check if podcast exists - episodes = database_functions.functions.get_episode_ids_for_podcast( - cnx, database_type, data.podcast_id - ) - if not episodes: - return {"detail": "No episodes found for the given podcast."} - - # Queue the download task using Celery - task = database_functions.tasks.queue_podcast_downloads.delay( - data.podcast_id, - data.user_id, - database_type, - data.is_youtube - ) - - return { - "detail": f"{'YouTube channel' if data.is_youtube else 'Podcast'} download has been queued. " - "Episodes will be downloaded in the background.", - "task_id": task.id - } - - except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Error queueing downloads: {str(e)}" - ) - -@app.get("/api/data/download_status/{user_id}") -async def api_download_status( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """ - Get the status of all active downloads for a user. 
- """ - # Validate API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check permissions - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id != user_id and not is_web_key: - raise HTTPException( - status_code=403, - detail="You can only view your own downloads!" - ) - - try: - # Get all active downloads for the user - downloads = database_functions.tasks.download_manager.get_user_downloads(user_id) - - # Return the downloads - return { - "downloads": downloads, - "count": len(downloads) - } - - except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Error retrieving download status: {str(e)}" - ) - -class DeletePodcastData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False # Default to False for backward compatibility - -@app.post("/api/data/delete_episode") -async def api_delete_podcast(data: DeletePodcastData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - database_functions.functions.delete_episode(database_type, cnx, data.episode_id, - data.user_id, data.is_youtube) - return {"detail": "Episode(s) Deleted"} - else: - raise HTTPException(status_code=403, - detail="You can only delete content for yourself!") - -class MarkEpisodeCompletedData(BaseModel): - 
episode_id: int - user_id: int - is_youtube: bool = False # Added field with default False - -@app.post("/api/data/mark_episode_completed") -async def api_mark_episode_completed(data: MarkEpisodeCompletedData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - database_functions.functions.mark_episode_completed( - cnx, - database_type, - data.episode_id, - data.user_id, - data.is_youtube - ) - return {"detail": "Episode marked as completed."} - else: - raise HTTPException(status_code=403, - detail="You can only mark episodes as completed for yourself.") - -@app.post("/api/data/mark_episode_uncompleted") -async def api_mark_episode_uncompleted(data: MarkEpisodeCompletedData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - database_functions.functions.mark_episode_uncompleted( - cnx, - database_type, - data.episode_id, - data.user_id, - data.is_youtube - ) - return {"detail": "Episode marked as uncompleted."} - else: - raise 
HTTPException(status_code=403, - detail="You can only mark episodes as uncompleted for yourself.") - -class AutoDownloadRequest(BaseModel): - podcast_id: int - auto_download: bool - user_id: int - -@app.post("/api/data/enable_auto_download") -async def api_enable_auto_download(data: AutoDownloadRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id: - database_functions.functions.enable_auto_download(cnx, database_type, data.podcast_id, data.user_id, data.auto_download) - return {"detail": "Auto-download status updated."} - else: - raise HTTPException(status_code=403, detail="You can only modify your own podcasts.") - -class AutoDownloadStatusRequest(BaseModel): - podcast_id: int - user_id: int - -class AutoDownloadStatusResponse(BaseModel): - auto_download: bool - -@app.post("/api/data/get_auto_download_status") -async def api_get_auto_download_status(data: AutoDownloadStatusRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id != data.user_id: - raise HTTPException(status_code=403, detail="You can only get the status for your own podcast.") - - status = database_functions.functions.call_get_auto_download_status(cnx, 
database_type, data.podcast_id, data.user_id) - if status is None: - raise HTTPException(status_code=404, detail="Podcast not found") - - return AutoDownloadStatusResponse(auto_download=status) - -class SkipTimesRequest(BaseModel): - podcast_id: int - start_skip: Optional[int] = 0 - end_skip: Optional[int] = 0 - user_id: int - -@app.post("/api/data/adjust_skip_times") -async def api_adjust_skip_times(data: SkipTimesRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - database_functions.functions.adjust_skip_times(cnx, database_type, data.podcast_id, data.start_skip, data.end_skip) - return {"detail": "Skip times updated."} - else: - raise HTTPException(status_code=403, detail="You can only modify your own podcasts.") - -class AutoSkipTimesRequest(BaseModel): - podcast_id: int - user_id: int - -class AutoSkipTimesResponse(BaseModel): - start_skip: int - end_skip: int - -@app.post("/api/data/get_auto_skip_times") -async def api_get_auto_skip_times(data: AutoSkipTimesRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id != data.user_id: - raise HTTPException(status_code=403, detail="You can only get the 
skip times for your own podcast.") - - start_skip, end_skip = database_functions.functions.get_auto_skip_times(cnx, database_type, data.podcast_id, data.user_id) - if start_skip is None or end_skip is None: - raise HTTPException(status_code=404, detail="Podcast not found") - - return AutoSkipTimesResponse(start_skip=start_skip, end_skip=end_skip) - -class PlayEpisodeDetailsRequest(BaseModel): - podcast_id: int - user_id: int - is_youtube: bool = False - -class PlayEpisodeDetailsResponse(BaseModel): - playback_speed: float - start_skip: int - end_skip: int - -@app.post("/api/data/get_play_episode_details") -async def api_get_play_episode_details(data: PlayEpisodeDetailsRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - # Get all details in one function call - playback_speed, start_skip, end_skip = database_functions.functions.get_play_episode_details( - cnx, - database_type, - data.user_id, - data.podcast_id, - data.is_youtube - ) - - return PlayEpisodeDetailsResponse( - playback_speed=playback_speed, - start_skip=start_skip, - end_skip=end_skip - ) - else: - raise HTTPException(status_code=403, - detail="You can only get metadata for yourself!") - -class ClearPlaybackSpeedRequest(BaseModel): - podcast_id: int - user_id: int - -@app.post("/api/data/clear_podcast_playback_speed") -async def api_clear_podcast_playback_speed(data: ClearPlaybackSpeedRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, 
database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id != data.user_id: - raise HTTPException(status_code=403, - detail="You can only modify your own podcast settings!") - - success = database_functions.functions.clear_podcast_playback_speed( - cnx, - database_type, - data.podcast_id, - data.user_id - ) - - if success: - return {"message": "Playback speed cleared successfully"} - else: - raise HTTPException(status_code=500, detail="Failed to clear playback speed") - -class SetPlaybackSpeedPodcast(BaseModel): - user_id: int - podcast_id: int - playback_speed: float - -class SetPlaybackSpeedUser(BaseModel): - user_id: int - playback_speed: float - -@app.post("/api/data/podcast/set_playback_speed") -async def api_set_playback_speed_podcast(data: SetPlaybackSpeedPodcast, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == data.user_id or is_web_key: - database_functions.functions.set_playback_speed_podcast(cnx, database_type, data.podcast_id, data.playback_speed) - return {"detail": "Default podcast playback speed updated."} - else: - raise HTTPException(status_code=403, detail="You can only modify your own podcasts.") - -@app.post("/api/data/user/set_playback_speed") -async def api_set_playback_speed_user(data: SetPlaybackSpeedUser, cnx=Depends(get_database_connection), - api_key: str = 
Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == data.user_id or is_web_key: - database_functions.functions.set_playback_speed_user(cnx, database_type, data.user_id, data.playback_speed) - return {"detail": "Default playback speed updated."} - else: - raise HTTPException(status_code=403, detail="You can only modify your own settings.") - - -class SaveEpisodeData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False - -@app.post("/api/data/save_episode") -async def api_save_episode(data: SaveEpisodeData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - ep_status = database_functions.functions.check_saved( - cnx, database_type, data.user_id, data.episode_id, data.is_youtube - ) - if ep_status: - return {"detail": f"{'Video' if data.is_youtube else 'Episode'} already saved."} - else: - success = database_functions.functions.save_episode( - cnx, database_type, data.episode_id, data.user_id, data.is_youtube - ) - if success: - return {"detail": f"{'Video' if data.is_youtube else 'Episode'} saved!"} - else: - raise HTTPException(status_code=400, detail=f"Error saving {'video' if data.is_youtube else 
'episode'}.") - else: - raise HTTPException(status_code=403, - detail=f"You can only save {'videos' if data.is_youtube else 'episodes'} of your own!") - -class RemoveSavedEpisodeData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False - -@app.post("/api/data/remove_saved_episode") -async def api_remove_saved_episode(data: RemoveSavedEpisodeData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == data.user_id: - database_functions.functions.remove_saved_episode( - cnx, database_type, data.episode_id, data.user_id, data.is_youtube - ) - return {"detail": f"Saved {'video' if data.is_youtube else 'episode'} removed."} - else: - raise HTTPException(status_code=403, - detail=f"You can only remove {'videos' if data.is_youtube else 'episodes'} of your own!") - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - -class AddCategoryData(BaseModel): - podcast_id: int - user_id: int - category: str - -@app.post("/api/data/add_category") -async def api_add_category(data: AddCategoryData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - existing_categories = 
database_functions.functions.get_categories(cnx, database_type, data.podcast_id, data.user_id) - if data.category in existing_categories: - return {"detail": "Category already exists."} - else: - success = database_functions.functions.add_category(cnx, database_type, data.podcast_id, data.user_id, data.category) - if success: - return {"detail": "Category added!"} - else: - raise HTTPException(status_code=400, detail="Error adding category.") - else: - raise HTTPException(status_code=403, detail="You can only modify categories of your own podcasts!") - -class RemoveCategoryData(BaseModel): - podcast_id: int - user_id: int - category: str - -@app.post("/api/data/remove_category") -async def api_remove_category(data: RemoveCategoryData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if key_id == data.user_id: - database_functions.functions.remove_category(cnx, database_type, data.podcast_id, data.user_id, data.category) - return {"detail": "Category removed."} - else: - raise HTTPException(status_code=403, - detail="You can only modify categories of your own podcasts!") - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - -class UpdateFeedCutoffDaysData(BaseModel): - podcast_id: int - user_id: int - feed_cutoff_days: int - -@app.post("/api/data/update_feed_cutoff_days") -async def api_update_feed_cutoff_days(data: UpdateFeedCutoffDaysData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # 
Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - success = database_functions.functions.update_feed_cutoff_days(cnx, database_type, data.podcast_id, data.user_id, data.feed_cutoff_days) - if success: - return {"detail": "Feed cutoff days updated successfully!"} - else: - raise HTTPException(status_code=400, detail="Error updating feed cutoff days.") - else: - raise HTTPException(status_code=403, detail="You can only modify settings of your own podcasts!") - -@app.get("/api/data/get_feed_cutoff_days") -async def api_get_feed_cutoff_days(podcast_id: int, user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - feed_cutoff_days = database_functions.functions.get_feed_cutoff_days(cnx, database_type, podcast_id, user_id) - if feed_cutoff_days is not None: - return {"podcast_id": podcast_id, "user_id": user_id, "feed_cutoff_days": feed_cutoff_days} - else: - raise HTTPException(status_code=404, detail="Podcast not found or does not belong to the user.") - else: - raise HTTPException(status_code=403, detail="You can only access settings of your own podcasts!") - -class TogglePodcastNotificationData(BaseModel): - user_id: int - podcast_id: int - 
enabled: bool - -@app.put("/api/data/podcast/toggle_notifications") -async def api_toggle_podcast_notifications( - data: TogglePodcastNotificationData, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - success = database_functions.functions.toggle_podcast_notifications( - cnx, - database_type, - data.podcast_id, - data.user_id, - data.enabled - ) - if success: - return {"detail": "Podcast notification settings updated successfully"} - else: - raise HTTPException(status_code=400, detail="Error updating podcast notification settings") - else: - raise HTTPException(status_code=403, detail="You can only modify your own podcast settings") - -class SetPodcastFeedCutoff(BaseModel): - user_id: int - podcast_id: int - feed_cutoff: int - -@app.put("/api/data/podcast/set_feed_cutoff") -async def api_toggle_podcast_notifications( - data: SetPodcastFeedCutoff, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - success = database_functions.functions.set_feed_cutoff( - cnx, - database_type, - data.podcast_id, - data.user_id, - data.feed_cutoff - ) - if success: - return {"detail": "Podcast feed cutoff setting updated successfully"} - else: - raise HTTPException(status_code=400, detail="Error 
updating podcast nfeed cutoff") - else: - raise HTTPException(status_code=403, detail="You can only modify your own podcast settings") - -class PodcastNotificationStatusData(BaseModel): - user_id: int - podcast_id: int - -@app.post("/api/data/podcast/notification_status") -async def api_get_podcast_notification_status( - data: PodcastNotificationStatusData, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - enabled = database_functions.functions.get_podcast_notification_status( - cnx, - database_type, - data.podcast_id, - data.user_id - ) - return {"enabled": enabled} - else: - raise HTTPException(status_code=403, detail="You can only check your own podcast settings") - -class NotificationSettingsData(BaseModel): - user_id: int - platform: str - enabled: bool - ntfy_topic: Optional[str] - ntfy_server_url: Optional[str] - gotify_url: Optional[str] - gotify_token: Optional[str] - -@app.get("/api/data/user/notification_settings") -async def api_get_notification_settings(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == user_id or is_web_key: - settings = database_functions.functions.get_notification_settings(cnx, database_type, user_id) - return {"settings": settings} - else: - raise 
HTTPException(status_code=403, detail="You can only access your own notification settings") - -@app.put("/api/data/user/notification_settings") -async def api_update_notification_settings(data: NotificationSettingsData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - success = database_functions.functions.update_notification_settings( - cnx, - database_type, - data.user_id, - data.platform, - data.enabled, - data.ntfy_topic, - data.ntfy_server_url, - data.gotify_url, - data.gotify_token - ) - if success: - return {"detail": "Notification settings updated successfully"} - else: - raise HTTPException(status_code=400, detail="Error updating notification settings") - else: - raise HTTPException(status_code=403, detail="You can only modify your own notification settings") - -class NotificationTestRequest(BaseModel): - user_id: int - platform: str - -@app.post("/api/data/user/test_notification") -async def api_test_notification( - data: NotificationTestRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Invalid API key") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - success = database_functions.functions.send_test_notification( - cnx, - database_type, - data.user_id, - data.platform - ) - if success: - return {"detail": "Test notification 
sent successfully"} - else: - raise HTTPException(status_code=400, detail="Error sending test notification") - else: - raise HTTPException(status_code=403, detail="You can only send test notifications to your own account") - -class RecordListenDurationData(BaseModel): - episode_id: int - user_id: int - listen_duration: float - is_youtube: Optional[bool] = False - - -@app.post("/api/data/record_listen_duration") -async def get(data: RecordListenDurationData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Ignore listen duration for episodes with ID 0 - if data.episode_id == 0: - return {"detail": "Listen duration for episode ID 0 is ignored."} - - # Continue as normal for all other episode IDs - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - if data.is_youtube: - database_functions.functions.record_youtube_listen_duration(cnx, database_type, data.episode_id, data.user_id, data.listen_duration) - else: - database_functions.functions.record_listen_duration(cnx, database_type, data.episode_id, data.user_id, data.listen_duration) - return {"detail": "Listen duration recorded."} - else: - raise HTTPException(status_code=403, detail="You can only record your own listen duration") - - -@app.get("/api/data/refresh_pods") -async def api_refresh_pods(background_tasks: BackgroundTasks, is_admin: bool = Depends(check_if_admin)): - background_tasks.add_task(refresh_pods_task) - return {"detail": "Refresh initiated."} - -def refresh_pods_task(): - cnx = create_database_connection() - try: - database_functions.functions.refresh_pods(cnx, database_type) - finally: - 
close_database_connection(cnx) - -# Store locks per user to prevent concurrent refresh jobs -user_locks = {} - -# Store active WebSocket connections -active_websockets = {} - -@app.websocket("/ws/api/data/episodes/{user_id}") -async def websocket_endpoint(websocket: WebSocket, user_id: int, cnx=Depends(get_database_connection), nextcloud_refresh: bool = Query(False), api_key: str = Query(None)): - await websocket.accept() - try: - print(f"User {user_id} connected to WebSocket") - # Validate the API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - await websocket.send_json({"detail": "Invalid API key or insufficient permissions"}) - await websocket.close() - return - # Continue as normal for all other episode IDs - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - print(f"User ID: {user_id}, Key ID: {key_id}, Web Key: {is_web_key}") - if key_id != user_id and not is_web_key: - await websocket.send_json({"detail": "You can only refresh your own podcasts"}) - await websocket.close() - return - if user_id in user_locks: - await websocket.send_json({"detail": "Refresh job already running for this user."}) - await websocket.close() - return - if user_id not in active_websockets: - active_websockets[user_id] = [] - print(f"Active WebSockets: {active_websockets}") - active_websockets[user_id].append(websocket) - # Create a lock for the user and start the refresh task - user_locks[user_id] = Lock() - try: - # Acquire the lock - user_locks[user_id].acquire() - print(f"Acquired lock for user {user_id}") - # Run the refresh process asynchronously without blocking the WebSocket - task = asyncio.create_task(run_refresh_process(user_id, nextcloud_refresh, websocket, cnx)) - print(f"Task created for user {user_id}") - # Keep the WebSocket connection alive while the task is running - while not task.done(): - try: - await 
asyncio.wait_for(websocket.receive_text(), timeout=1.0) - except asyncio.TimeoutError: - # This is expected, we're just using it to keep the connection alive - pass - except Exception as e: - print(f"WebSocket disconnected: {str(e)}. Cancelling task.") - task.cancel() - break - except Exception as e: - await websocket.send_json({"detail": f"Error: {str(e)}"}) - finally: - # Always release the lock and clean up - user_locks[user_id].release() - del user_locks[user_id] - if user_id in active_websockets: - active_websockets[user_id].remove(websocket) - if not active_websockets[user_id]: - del active_websockets[user_id] - # For the WebSocket dependency, use the proper function - close_database_connection(cnx) - await websocket.close() - except Exception as e: - # Handle any unexpected errors - await websocket.send_json({"detail": f"Unexpected error: {str(e)}"}) - await websocket.close() - -async def run_refresh_process(user_id, nextcloud_refresh, websocket, cnx): - print("Starting refresh process") - print(f"Running refresh process for user in job {user_id}") - try: - # First get total count of podcasts - cursor = cnx.cursor() - if database_type == "postgresql": - cursor.execute(''' - SELECT COUNT(*), array_agg("podcastname") - FROM "Podcasts" - WHERE "userid" = %s - ''', (user_id,)) - else: - cursor.execute(''' - SELECT COUNT(*), GROUP_CONCAT(PodcastName) - FROM Podcasts - WHERE UserID = %s - ''', (user_id,)) - count_result = cursor.fetchone() - # Handle both dictionary and tuple results - if isinstance(count_result, dict): - total_podcasts = count_result['count'] if count_result else 0 - else: - total_podcasts = count_result[0] if count_result else 0 - await websocket.send_json({ - "progress": { - "current": 0, - "total": total_podcasts, - "current_podcast": "" - } - }) - - # Get default device information for sync - default_device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id) - default_device_name = None - - if 
default_device_id: - # Get the device name - device_cursor = cnx.cursor() - if database_type == "postgresql": - device_query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s' - else: - device_query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s" - - device_cursor.execute(device_query, (default_device_id,)) - device_result = device_cursor.fetchone() - device_cursor.close() - - if device_result: - default_device_name = device_result[0] if isinstance(device_result, tuple) else device_result["devicename"] - print(f"Using default device for sync: {default_device_name} (ID: {default_device_id})") - else: - print("Default device ID found but no name - will use automatic fallback") - else: - print("No default device found - will use automatic fallback") - - if nextcloud_refresh: - await websocket.send_json({"detail": "Refreshing Nextcloud subscriptions..."}) - print(f"Refreshing Nextcloud subscriptions for user {user_id}") - gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, user_id) - pod_sync_type = database_functions.functions.get_gpodder_type(cnx, database_type, user_id) - if pod_sync_type == "nextcloud": - await asyncio.to_thread(database_functions.functions.refresh_nextcloud_subscription, - database_type, cnx, user_id, gpodder_url, gpodder_token, gpodder_login, pod_sync_type, - default_device_id, default_device_name, False) - else: - await asyncio.to_thread(database_functions.functions.refresh_gpodder_subscription, - database_type, cnx, user_id, gpodder_url, gpodder_token, gpodder_login, pod_sync_type, - default_device_id, default_device_name, False) - await websocket.send_json({"detail": "Pod Sync subscription refresh complete."}) - # Get list of podcast names for progress updates - print('Getting list') - if database_type == "postgresql": - cursor.execute(''' - SELECT "podcastid", "podcastname", "feedurl", "artworkurl", "autodownload", - "username", "password", 
"isyoutubechannel", "feedcutoffdays" - FROM "Podcasts" - WHERE "userid" = %s - ''', (user_id,)) - else: - cursor.execute(''' - SELECT PodcastID, PodcastName, FeedURL, ArtworkURL, AutoDownload, - Username, Password, IsYouTubeChannel, FeedCutoffDays - FROM Podcasts - WHERE UserID = %s - ''', (user_id,)) - podcasts = cursor.fetchall() - print('got list') - - # Process each podcast - current = 0 - for podcast in podcasts: - current += 1 - if isinstance(podcast, dict): - if database_type == "postgresql": - podcast_id = podcast['podcastid'] - podcast_name = podcast['podcastname'] - feed_url = podcast['feedurl'] - artwork_url = podcast['artworkurl'] - auto_download = podcast['autodownload'] - username = podcast['username'] - password = podcast['password'] - is_youtube = podcast['isyoutubechannel'] - feed_cutoff = podcast['feedcutoffdays'] - else: - podcast_id = podcast['PodcastID'] - podcast_name = podcast['PodcastName'] - feed_url = podcast['FeedURL'] - artwork_url = podcast['ArtworkURL'] - auto_download = podcast['AutoDownload'] - username = podcast['Username'] - password = podcast['Password'] - is_youtube = podcast['IsYouTubeChannel'] - feed_cutoff = podcast['FeedCutoffDays'] - else: - podcast_id, podcast_name, feed_url, artwork_url, auto_download, username, password, is_youtube, feed_cutoff = podcast - - await websocket.send_json({ - "progress": { - "current": current, - "total": total_podcasts, - "current_podcast": podcast_name - } - }) - - # Refresh this podcast - # print(f'is it youtube?: {is_youtube}') - try: - if is_youtube is True: - # Extract channel ID from feed URL - channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url - channel_id = channel_id.split('/')[0].split('?')[0] - youtube_episodes = await asyncio.to_thread( - database_functions.youtube.process_youtube_videos, - database_type, - podcast_id, - channel_id, - cnx, - feed_cutoff - ) - if youtube_episodes: - for episode in youtube_episodes: - if user_id in active_websockets: 
- for ws in active_websockets[user_id]: - await ws.send_json({"new_episode": episode}) - else: - episodes = await asyncio.to_thread( - database_functions.functions.add_episodes, - cnx, - database_type, - podcast_id, - feed_url, - artwork_url, - auto_download, - username, - password, - True # websocket - ) - - if episodes: - for episode in episodes: - if user_id in active_websockets: - for ws in active_websockets[user_id]: - await ws.send_json({"new_episode": episode}) - except Exception as e: - print(f"Error refreshing podcast {podcast_id}: {str(e)}") - continue - - except Exception as e: - await websocket.send_json({"detail": f"Error during refresh: {e}"}) - finally: - # Clear explicit reference - if cnx: - try: - # Get connection type - connection_type = type(cnx).__name__ - print(f"Closing connection of type: {connection_type}") - - # For PooledMySQLConnection - if connection_type == "PooledMySQLConnection": - print("Detected PooledMySQLConnection - using special handling") - # DO NOTHING - don't try to close or modify it - # Just let it go out of scope and be garbage collected - pass - # Regular MySQL connection - elif "MySQL" in connection_type: - print("Detected MySQL connection - using basic close") - try: - cnx.close() - except Exception as e: - print(f"MySQL close error (ignored): {e}") - # PostgreSQL connection - elif hasattr(cnx, 'closed'): - print("Detected PostgreSQL connection") - if not cnx.closed: - cnx.close() - # Generic - elif hasattr(cnx, 'close'): - print("Using generic close method") - cnx.close() - - print("Connection handling complete") - except Exception as e: - print(f"Connection handling error: {e}") - - # Force drop reference regardless of what happened above - cnx = None - - # Force garbage collection - import gc - gc.collect() - print("Garbage collection complete") - -@app.get("/api/data/get_stats") -async def api_get_stats(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - 
logging.info('Fetching API key') - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - logging.info('Getting key ID') - logger.info(f'id {user_id}') - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - logging.info(f'Got key ID: {key_id}') - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - stats = database_functions.functions.get_stats(cnx, database_type, user_id) - logging.info('Got stats') - if stats is None: - raise HTTPException(status_code=404, detail="Stats not found for the given user ID") - return stats - else: - raise HTTPException(status_code=403, detail="You can only get stats for your own account.") - - - -@app.get("/api/data/get_user_episode_count") -async def api_get_user_episode_count(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - logging.error(f"not valid key") - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - episode_count = database_functions.functions.get_user_episode_count(cnx, database_type, user_id) - if episode_count: - return episode_count - else: - raise 
HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.get("/api/data/get_user_info") -async def api_get_user_info(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)): - user_info = database_functions.functions.get_user_info(database_type, cnx) - return user_info - -@app.get("/api/data/my_user_info/{user_id}") -async def api_get_my_user_info( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - try: - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if the API key belongs to the requested user_id - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - is_web_key = api_key == base_webkey.web_key - - if key_id != user_id and not is_web_key: - raise HTTPException( - status_code=403, - detail="You can only retrieve your own user information!" 
- ) - - user_info = database_functions.functions.get_my_user_info(database_type, cnx, user_id) - if not user_info: - raise HTTPException(status_code=404, detail="User not found") - - return user_info - - except HTTPException: - raise - except Exception as e: - logging.error(f"Error in api_get_my_user_info: {str(e)}") - raise HTTPException(status_code=500, detail="An error occurred while retrieving user information") - -@app.get("/api/data/check_podcast", response_model=Dict[str, bool]) -async def api_check_podcast( - user_id: int, - podcast_name: str, - podcast_url: str, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - exists = database_functions.functions.check_podcast(cnx, database_type, user_id, podcast_name, podcast_url) - return {"exists": exists} - else: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - -@app.get("/api/data/check_youtube_channel", response_model=Dict[str, bool]) -async def api_check_youtube_channel( - user_id: int, - channel_name: str, - channel_url: str, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - exists = database_functions.functions.check_youtube_channel( - cnx, database_type, user_id, channel_name, channel_url - ) - return {"exists": exists} - else: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - -@app.get("/api/data/user_admin_check/{user_id}") -async def api_user_admin_check_route(user_id: int, api_key: str = Depends(get_api_key_from_header), - cnx=Depends(get_database_connection)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not 
is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to check admin status for other users") - is_admin = await run_in_threadpool(database_functions.functions.user_admin_check, cnx, database_type, user_id) - return {"is_admin": is_admin} - -class RemoveYouTubeChannelData(BaseModel): - user_id: int - channel_name: str - channel_url: str - -@app.post("/api/data/remove_youtube_channel") -async def api_remove_youtube_channel_route( - data: RemoveYouTubeChannelData = Body(...), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if data.user_id != user_id_from_api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to remove channels for other users" - ) - - database_functions.functions.remove_youtube_channel_by_url( - cnx, database_type, data.channel_name, data.channel_url, data.user_id - ) - return {"success": True} - -class RemovePodcastData(BaseModel): - user_id: int - podcast_name: str - podcast_url: str - - -@app.post("/api/data/remove_podcast") -async def api_remove_podcast_route(data: RemovePodcastData = Body(...), 
cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if data.user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to remove podcasts for other users") - - # First, get the podcast ID and check if it's a YouTube channel - podcast_id = database_functions.functions.get_podcast_id(database_type, cnx, data.user_id, data.podcast_url, data.podcast_name) - - if podcast_id is None: - raise HTTPException(status_code=404, detail="Podcast not found") - - # Check if this is a YouTube channel - is_youtube = database_functions.functions.check_youtube_channel_id(cnx, database_type, podcast_id) - - # Track if episodes have been handled - episodes_handled = False - - if database_functions.functions.check_gpodder_settings(database_type, cnx, data.user_id): - logging.info('get cloud vals') - gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, data.user_id) - - # Get the full gpodder settings to check URL - gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, data.user_id) - - logging.info('em cloud') - podcast_feed = database_functions.functions.get_podcast_feed_by_id(cnx, database_type, podcast_id) - gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, data.user_id) - - # Get the correct device name, matching what we do in add_podcast - device_name = f"pinepods-internal-{data.user_id}" # Default device name - if 
gpod_type == "gpodder": - default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, data.user_id) - if default_device: - device_name = default_device["name"] - - if gpod_type == "nextcloud": - database_functions.functions.remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, podcast_feed) - else: - # Modified return value includes whether episodes were handled - success, episodes_handled = database_functions.functions.remove_podcast_from_opodsync( - cnx, database_type, data.user_id, gpodder_url, gpodder_login, - gpodder_token, podcast_feed, device_name - ) - - # Only run the appropriate remove function if episodes weren't already handled by gpodder sync - if not episodes_handled: - if is_youtube: - database_functions.functions.remove_youtube_channel(cnx, database_type, podcast_id, data.user_id) - else: - database_functions.functions.remove_podcast(cnx, database_type, data.podcast_name, data.podcast_url, data.user_id) - else: - logging.info('skipping remove - already handled by gpodder sync') - - return {"success": True} - -class RemovePodcastIDData(BaseModel): - user_id: int - podcast_id: int - is_youtube: bool = False - -@app.post("/api/data/remove_podcast_id") -async def api_remove_podcast_route_id(data: RemovePodcastIDData = Body(...), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if data.user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to remove content for 
other users") - if data.is_youtube: - database_functions.functions.remove_youtube_channel(cnx, database_type, data.podcast_id, data.user_id) - else: - # Existing podcast removal logic - logging.info('check gpod') - episodes_handled = False # Track whether episodes were already handled by gpodder sync - - if database_functions.functions.check_gpodder_settings(database_type, cnx, data.user_id): - logging.info('get cloud vals') - gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, data.user_id) - - # Get the full gpodder settings to check URL - gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, data.user_id) - - logging.info('em cloud') - podcast_feed = database_functions.functions.get_podcast_feed_by_id(cnx, database_type, data.podcast_id) - gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, data.user_id) - - # Get the correct device name, matching what we do in add_podcast - device_name = f"pinepods-internal-{data.user_id}" # Default device name - if gpod_type == "gpodder": - default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, data.user_id) - if default_device: - device_name = default_device["name"] - - if gpod_type == "nextcloud": - database_functions.functions.remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, podcast_feed) - else: - # Modified return value includes whether episodes were handled - success, episodes_handled = database_functions.functions.remove_podcast_from_opodsync( - cnx, database_type, data.user_id, gpodder_url, gpodder_login, - gpodder_token, podcast_feed, device_name - ) - - # Only run remove_podcast_id if episodes weren't already handled by gpodder sync - if not episodes_handled: - logging.info('rm pod id') - database_functions.functions.remove_podcast_id(cnx, database_type, data.podcast_id, data.user_id) - else: - 
logging.info('skipping rm pod id - already handled by gpodder sync') - - return {"success": True} - - -@app.get("/api/data/return_pods/{user_id}") -async def api_return_pods(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - try: - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == user_id or is_web_key: - pods = database_functions.functions.return_pods(database_type, cnx, user_id) - - # Return empty list if no podcasts found - if not pods: - return {"pods": []} - - # Filter out any None values that might have slipped through - cleaned_pods = [] - for pod in pods: - if pod and isinstance(pod, dict): - cleaned_pod = { - k: v if v is not None else "" - for k, v in pod.items() - } - cleaned_pods.append(cleaned_pod) - - return {"pods": cleaned_pods} - else: - raise HTTPException(status_code=403, - detail="You can only return pods for yourself!") - - except Exception as e: - logging.error(f"Error in api_return_pods: {str(e)}") - return {"pods": [], "error": "An error occurred while retrieving podcasts"} - -@app.get("/api/data/user_history/{user_id}") -async def api_user_history(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # 
    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == user_id or is_web_key:
        history = database_functions.functions.user_history(cnx, database_type, user_id)
        return {"data": history}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only return history for yourself!")


@app.get("/api/data/saved_episode_list/{user_id}")
async def api_saved_episode_list(user_id: int, cnx=Depends(get_database_connection),
                                 api_key: str = Depends(get_api_key_from_header)):
    """Return the saved-episode list for ``user_id``.

    Only the key owner (or the shared web key) may read a user's saved episodes.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == user_id or is_web_key:
        saved_episodes = database_functions.functions.saved_episode_list(database_type, cnx, user_id)
        return {"saved_episodes": saved_episodes}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only return saved episodes for yourself!")


@app.get("/api/data/download_episode_list")
async def api_download_episode_list(cnx=Depends(get_database_connection),
                                    api_key: str = Depends(get_api_key_from_header),
                                    user_id: int = Query(...)):
    """Return the downloaded-episode list for ``user_id`` (query parameter)."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == user_id or is_web_key:
        downloaded_episodes = database_functions.functions.download_episode_list(database_type, cnx, user_id)
        return {"downloaded_episodes": downloaded_episodes}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only return downloaded episodes for yourself!")


class UserValues(BaseModel):
    # Payload for user creation; hash_pw is the password already hashed client-side.
    fullname: str
    username: str
    email: str
    hash_pw: str


@app.post("/api/data/add_user")
async def api_add_user(is_admin: bool = Depends(check_if_admin),
                       cnx=Depends(get_database_connection),
                       api_key: str = Depends(get_api_key_from_header),
                       user_values: UserValues = Body(...)):
    """Admin-only: create a new user account.

    Maps duplicate username/email violations from both PostgreSQL (psycopg)
    and MySQL (mysql.connector) onto HTTP 409 responses.
    """
    try:
        user_id = database_functions.functions.add_user(cnx, database_type, (
            user_values.fullname, user_values.username.lower(), user_values.email, user_values.hash_pw))

        if not user_id:
            raise HTTPException(
                status_code=500,
                detail="Failed to create user - no user ID returned"
            )

        return {"detail": "Success", "user_id": user_id}

    except psycopg.errors.UniqueViolation as e:
        # Postgres reports the violated constraint name in the error text.
        error_detail = str(e)
        if "Users_username_key" in error_detail:
            raise HTTPException(
                status_code=409,
                detail="This username is already taken. Please choose a different username."
            )
        elif "Users_email_key" in error_detail:
            raise HTTPException(
                status_code=409,
                detail="This email address is already registered. Please use a different email."
            )
        else:
            raise HTTPException(
                status_code=409,
                detail="A conflict occurred while creating the user. Please try again with different credentials."
            )

    except psycopg.errors.OperationalError as e:
        logging.error(f"Database operational error: {str(e)}")
        raise HTTPException(
            status_code=503,
            detail="Unable to connect to the database. Please try again later."
        )

    except mysql.connector.errors.IntegrityError as e:
        # MySQL duplicate-key errors mention the offending column in the message.
        error_msg = str(e)
        if "Duplicate entry" in error_msg and "username" in error_msg.lower():
            raise HTTPException(
                status_code=409,
                detail="This username is already taken. Please choose a different username."
            )
        elif "Duplicate entry" in error_msg and "email" in error_msg.lower():
            raise HTTPException(
                status_code=409,
                detail="This email address is already registered. Please use a different email."
            )
        else:
            raise HTTPException(
                status_code=409,
                detail="A conflict occurred while creating the user. Please try again with different credentials."
            )

    except Exception as e:
        logging.error(f"Unexpected error adding user: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail=f"An unexpected error occurred while creating the user: {str(e)}"
        )


@app.post("/api/data/add_login_user")
# NOTE(review): this handler reuses the name `api_add_user` defined above,
# shadowing it at module level. FastAPI still registers both routes, but a
# distinct name (e.g. api_add_login_user) would be clearer.
async def api_add_user(cnx=Depends(get_database_connection),
                       user_values: UserValues = Body(...)):
    """Self-service signup; only allowed when self-service is enabled."""
    try:
        self_service = database_functions.functions.check_self_service(cnx, database_type)
        if not self_service:
            raise HTTPException(
                status_code=403,
                detail="Your API key is either invalid or does not have correct permission"
            )

        user_id = database_functions.functions.add_user(cnx, database_type, (
            user_values.fullname, user_values.username.lower(), user_values.email, user_values.hash_pw))

        if not user_id:
            raise HTTPException(
                status_code=500,
                detail="Failed to create user account - no user ID returned"
            )

        return {"detail": "User added successfully", "user_id": user_id}

    except UniqueViolation as e:
        error_detail = str(e)
        if "Users_username_key" in error_detail:
            raise HTTPException(
                status_code=409,
                detail="This username is already taken. Please choose a different username."
            )
        elif "Users_email_key" in error_detail:
            raise HTTPException(
                status_code=409,
                detail="This email address is already registered. Please use a different email."
- ) - else: - raise HTTPException( - status_code=409, - detail="A conflict occurred while creating the user. Please try again with different credentials." - ) - - except OperationalError as e: - logging.error(f"Database operational error: {str(e)}") - raise HTTPException( - status_code=503, - detail="Unable to connect to the database. Please try again later." - ) - - except Exception as e: - logging.error(f"Unexpected error adding user: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"An unexpected error occurred while creating your account: {str(e)}" - ) - -@app.put("/api/data/set_fullname/{user_id}") -async def api_set_fullname(user_id: int, new_name: str = Query(...), cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - try: - database_functions.functions.set_fullname(cnx, database_type, user_id, new_name) - return {"detail": "Fullname updated."} - except: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -class PasswordUpdateRequest(BaseModel): - hash_pw: str - -@app.put("/api/data/set_password/{user_id}") -async def api_set_password( - user_id: int, - request: PasswordUpdateRequest, # Use the Pydantic model - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - hash_pw = request.hash_pw # Extract the hash_pw from the 
request model - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You are not authorized to access these user details") - - try: - database_functions.functions.set_password(cnx, database_type, user_id, hash_pw) - return {"detail": "Password updated."} - except Exception as e: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"User not found. Error: {str(e)}") - -@app.put("/api/data/user/set_email") -async def api_set_email(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), - user_id: int = Body(...), new_email: str = Body(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - try: - database_functions.functions.set_email(cnx, database_type, user_id, new_email) - return {"detail": "Email updated."} - except: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.put("/api/data/user/set_username") -async def 
api_set_username(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), - user_id: int = Body(...), new_username: str = Body(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - try: - database_functions.functions.set_username(cnx, database_type, user_id, new_username.lower()) - return {"detail": "Username updated."} - except: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.put("/api/data/user/set_isadmin") -async def api_set_isadmin(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection), - user_id: int = Body(...), isadmin: bool = Body(...)): - database_functions.functions.set_isadmin(cnx, database_type, user_id, isadmin) - return {"detail": "IsAdmin status updated."} - - -@app.get("/api/data/user/final_admin/{user_id}") -async def api_final_admin(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection), - user_id: int = Path(...)): - is_final_admin = database_functions.functions.final_admin(cnx, database_type, user_id) - return {"final_admin": is_final_admin} - - -@app.delete("/api/data/user/delete/{user_id}") -async def api_delete_user(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection), - user_id: int = Path(...)): - database_functions.functions.delete_user(cnx, database_type, user_id) - return {"status": "User deleted"} - - -class 
OIDCProviderValues(BaseModel): - provider_name: str - client_id: str - client_secret: str - authorization_url: str - token_url: str - user_info_url: str - button_text: str - scope: Optional[str] = "openid email profile" - button_color: Optional[str] = "#000000" - button_text_color: Optional[str] = "#000000" - icon_svg: Optional[str] = None - name_claim: Optional[str] = None - email_claim: Optional[str] = None - username_claim: Optional[str] = None - roles_claim: Optional[str] = None - user_role: Optional[str] = None - admin_role: Optional[str] = None - -@app.post("/api/data/add_oidc_provider") -async def api_add_oidc_provider( - is_admin: bool = Depends(check_if_admin), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - provider_values: OIDCProviderValues = Body(...)): - try: - provider_id = database_functions.functions.add_oidc_provider(cnx, database_type, ( - provider_values.provider_name, - provider_values.client_id, - provider_values.client_secret, - provider_values.authorization_url, - provider_values.token_url, - provider_values.user_info_url, - provider_values.button_text, - provider_values.scope, - provider_values.button_color, - provider_values.button_text_color, - provider_values.icon_svg, - provider_values.name_claim, - provider_values.email_claim, - provider_values.username_claim, - provider_values.roles_claim, - provider_values.user_role, - provider_values.admin_role - )) - if not provider_id: - raise HTTPException( - status_code=500, - detail="Failed to create provider - no provider ID returned" - ) - return {"detail": "Success", "provider_id": provider_id} - except psycopg.errors.UniqueViolation: - raise HTTPException( - status_code=409, - detail="A provider with this name already exists" - ) - except Exception as e: - logging.error(f"Unexpected error adding provider: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"An unexpected error occurred while creating the provider: {str(e)}" - ) - 
-@app.post("/api/data/remove_oidc_provider") -async def api_remove_oidc_provider( - is_admin: bool = Depends(check_if_admin), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header), - provider_id: int = Body(...)): - try: - result = database_functions.functions.remove_oidc_provider(cnx, database_type, provider_id) - if not result: - raise HTTPException( - status_code=404, - detail="Provider not found" - ) - return {"detail": "Success"} - except Exception as e: - logging.error(f"Unexpected error removing provider: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"An unexpected error occurred while removing the provider: {str(e)}" - ) - -@app.get("/api/data/list_oidc_providers") -async def api_list_oidc_providers( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - try: - providers = database_functions.functions.list_oidc_providers(cnx, database_type) - return {"providers": providers} - except Exception as e: - logging.error(f"Unexpected error listing providers: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"An unexpected error occurred while listing providers: {str(e)}" - ) - -# Public reqeust for login info -@app.get("/api/data/public_oidc_providers") -async def api_public_oidc_providers(cnx=Depends(get_database_connection)): - """Get minimal OIDC provider info needed for login screen buttons.""" - try: - providers = database_functions.functions.get_public_oidc_providers(cnx, database_type) - return {"providers": providers} - except Exception as e: - logging.error(f"Unexpected error getting public provider info: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"An unexpected error occurred: {str(e)}" - ) - - -@app.put("/api/data/user/set_theme") -async def api_set_theme(user_id: int = Body(...), new_theme: str = Body(...), cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = 
database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - database_functions.functions.set_theme(cnx, database_type, user_id, new_theme) - return {"message": "Theme updated successfully"} - else: - raise HTTPException(status_code=403, - detail="You can only set your own theme!") - -@app.post("/api/data/create_api_key") -async def api_create_api_key( - user_id: int = Body(..., embed=True), - rssonly: bool = Body(..., embed=True), - podcast_ids: Optional[List[int]] = Body(None, embed=True), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if user_id == key_id or is_web_key: - if rssonly: - new_key = database_functions.functions.create_rss_key(cnx, database_type, user_id, podcast_ids) - else: - new_key = database_functions.functions.create_api_key(cnx, database_type, user_id) - return {"rss_key" if rssonly else "api_key": new_key} - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - -@app.post("/api/data/set_rss_key_podcasts") -async def api_set_rss_key_podcasts( - user_id: int = Body(..., embed=True), - rss_key_id: int = Body(..., embed=True), - podcast_ids: Optional[List[int]] = Body(None, embed=True), - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_web_key = api_key == base_webkey.web_key - key_id = 
database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if user_id == key_id or is_web_key: - database_functions.functions.set_rss_key_podcasts(cnx, database_type, rss_key_id, podcast_ids) - return {"message": "Podcast IDs updated successfully"} - else: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - -class SendTestEmailValues(BaseModel): - server_name: str - server_port: str - from_email: str - send_mode: str - encryption: str - auth_required: bool - email_username: str - email_password: str - to_email: str - message: str # Add this line - - -def send_email(payload: SendTestEmailValues): - # This is now a synchronous function - msg = MIMEMultipart() - msg['From'] = payload.from_email - msg['To'] = payload.to_email - msg['Subject'] = "Test Email" - msg.attach(MIMEText(payload.message, 'plain')) - try: - port = int(payload.server_port) # Convert port to int here - if payload.encryption == "SSL/TLS": - server = smtplib.SMTP_SSL(payload.server_name, port) - else: - server = smtplib.SMTP(payload.server_name, port) - if payload.encryption == "StartTLS": - server.starttls() - if payload.auth_required: - server.login(payload.email_username, payload.email_password) - server.send_message(msg) - server.quit() - return "Email sent successfully" - except Exception as e: - raise Exception(f"Failed to send email: {str(e)}") - -@app.post("/api/data/send_test_email") -async def api_send_email(payload: SendTestEmailValues, is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - # Assume API key validation logic here - try: - # Use run_in_threadpool to execute the synchronous send_email function - send_status = await run_in_threadpool(send_email, payload) - return {"email_status": send_status} - except Exception as e: - print(traceback.format_exc()) # Print full exception information - raise 
HTTPException(status_code=500, detail=f"Failed to send email: {str(e)}")


class SendEmailValues(BaseModel):
    # Minimal payload for sending via the stored server-wide email settings.
    to_email: str
    subject: str
    message: str  # Body text of the email


def send_email_with_settings(email_values, database_type, payload: SendEmailValues):
    """Synchronously send an email using stored settings.

    `email_values` is the dict returned by get_email_settings (keys such as
    'FromEmail', 'ServerName', 'ServerPort', 'Encryption', 'AuthRequired',
    'Username', 'Password'). Returns a success string or raises Exception.
    """
    try:
        msg = MIMEMultipart()
        msg['From'] = email_values['FromEmail']
        msg['To'] = payload.to_email
        msg['Subject'] = payload.subject
        msg.attach(MIMEText(payload.message, 'plain'))

        try:
            port = int(email_values['ServerPort'])
            if email_values['Encryption'] == "SSL/TLS":
                server = smtplib.SMTP_SSL(email_values['ServerName'], port)
            elif email_values['Encryption'] == "StartTLS":
                server = smtplib.SMTP(email_values['ServerName'], port)
                server.starttls()
            else:
                server = smtplib.SMTP(email_values['ServerName'], port)

            if email_values['AuthRequired']:
                server.login(email_values['Username'], email_values['Password'])

            server.send_message(msg)
            server.quit()
            return "Email sent successfully"
        except Exception as e:
            raise Exception(f"Failed to send email: {str(e)}")
    except Exception as e:
        logging.error(f"Failed to send email: {str(e)}", exc_info=True)
        raise Exception(f"Failed to send email: {str(e)}")


@app.post("/api/data/send_email")
# NOTE(review): this reuses the name `api_send_email` already used by the
# test-email endpoint above; both routes register, but the module-level name
# is shadowed — consider renaming.
async def api_send_email(payload: SendEmailValues, cnx=Depends(get_database_connection),
                         api_key: str = Depends(get_api_key_from_header)):
    """Send an email using the stored server-wide email settings."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Invalid API key")

    email_values = database_functions.functions.get_email_settings(cnx, database_type)
    if not email_values:
        raise HTTPException(status_code=404, detail="Email settings not found")

    try:
        send_status = await run_in_threadpool(send_email_with_settings, email_values, database_type, payload)
        return {"email_status": send_status}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to send email: {str(e)}")


@app.post("/api/data/save_email_settings")
async def api_save_email_settings(email_settings: dict = Body(..., embed=True),
                                  is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
    # Admin-only: persist server-wide email settings.
    database_functions.functions.save_email_settings(cnx, database_type, email_settings)
    return {"message": "Email settings saved."}


@app.get("/api/data/get_encryption_key")
async def api_get_encryption_key(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
    # Admin-only: return the stored encryption key.
    encryption_key = database_functions.functions.get_encryption_key(cnx, database_type)
    return {"encryption_key": encryption_key}


@app.get("/api/data/get_email_settings")
async def api_get_email_settings(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
    # Admin-only: return the stored email settings.
    email_settings = database_functions.functions.get_email_settings(cnx, database_type)
    return email_settings


class DeleteAPIKeyHeaders(BaseModel):
    # NOTE(review): both ids are strings here; the ownership check below
    # compares payload.user_id against id_from_api_key's result with `!=` —
    # if that helper returns an int the comparison is always unequal. Confirm
    # the return type.
    api_id: str
    user_id: str


@app.delete("/api/data/delete_api_key")
async def api_delete_api_key(payload: DeleteAPIKeyHeaders, cnx=Depends(get_database_connection),
                             api_key: str = Depends(get_api_key_from_header)):
    """Delete an API key after ownership, self-use, and guest-user checks."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)

    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    elevated_access = await has_elevated_access(api_key, cnx)

    if not elevated_access:
        # Get user ID from API key
        user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

        if payload.user_id != user_id_from_api_key:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access or remove other users api-keys.")
    # Check if the API key to be deleted is the same as the one used in the current request
    if database_functions.functions.is_same_api_key(cnx, database_type, payload.api_id,
api_key): - raise HTTPException(status_code=403, - detail="You cannot delete the API key that is currently in use.") - # Check if the API key belongs to the guest user (user_id 1) - if database_functions.functions.belongs_to_guest_user(cnx, database_type, payload.api_id): - raise HTTPException(status_code=403, - detail="Cannot delete guest user api.") - - # Proceed with deletion if the checks pass - database_functions.functions.delete_api(cnx, database_type, payload.api_id) - return {"detail": "API key deleted."} - - -@app.get("/api/data/get_api_info/{user_id}") -async def api_get_api_info(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), - user_id: int = Path(...)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - api_information = database_functions.functions.get_api_info(database_type, cnx, user_id) - if api_information: - return {"api_info": api_information} - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -class ResetCodePayload(BaseModel): - email: str - username: str - - -class ResetPasswordPayload(BaseModel): - email: str - hashed_pw: str - - -@app.post("/api/data/reset_password_create_code") -async def api_reset_password_route(payload: ResetCodePayload, cnx=Depends(get_database_connection)): - email_setup = database_functions.functions.get_email_settings(cnx, database_type) - if email_setup['Server_Name'] == 
"default_server": - raise HTTPException(status_code=403, - detail="Email settings not configured. Please contact your administrator.") - else: - check_user = database_functions.functions.check_reset_user(cnx, database_type, payload.username.lower(), payload.email) - if check_user: - create_code = database_functions.functions.reset_password_create_code(cnx, database_type, payload.email) - - # Create a SendTestEmailValues instance with the email setup values and the password reset code - email_payload = SendEmailValues( - to_email=payload.email, - subject="Pinepods Password Reset Code", - message=f"Your password reset code is {create_code}" - ) - # Send the email with the password reset code - email_send = send_email_with_settings(email_setup, email_payload) - if email_send: - return {"code_created": True} - else: - database_functions.functions.reset_password_remove_code(cnx, database_type, payload.email) - raise HTTPException(status_code=500, detail="Failed to send email") - - return {"user_exists": user_exists} - else: - raise HTTPException(status_code=404, detail="User not found") - -class ResetVerifyCodePayload(BaseModel): - reset_code: str - email: str - new_password: str - -@app.post("/api/data/verify_and_reset_password") -async def api_verify_and_reset_password_route(payload: ResetVerifyCodePayload, cnx=Depends(get_database_connection)): - code_valid = database_functions.functions.verify_reset_code(cnx, database_type, payload.email, payload.reset_code) - if code_valid is None: - raise HTTPException(status_code=404, detail="User not found") - elif not code_valid: - raise HTTPException(status_code=400, detail="Code is invalid") - # return {"code_valid": False} - - message = database_functions.functions.reset_password_prompt(cnx, database_type, payload.email, payload.new_password) - if message is None: - raise HTTPException(status_code=500, detail="Failed to reset password") - return {"message": message} - -class EpisodeMetadata(BaseModel): - episode_id: int - 
    user_id: int
    person_episode: bool = False  # Default to False if not specified
    is_youtube: bool = False


@app.post("/api/data/get_episode_metadata")
async def api_get_episode_metadata(data: EpisodeMetadata, cnx=Depends(get_database_connection),
                                   api_key: str = Depends(get_api_key_from_header)):
    """Return metadata for one episode; callers may only query themselves unless using the web key."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == data.user_id or is_web_key:
        episode = database_functions.functions.get_episode_metadata(
            database_type,
            cnx,
            data.episode_id,
            data.user_id,
            data.person_episode,
            data.is_youtube
        )
        return {"episode": episode}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only get metadata for yourself!")


class GetPlaybackSpeed(BaseModel):
    podcast_id: Optional[int] = None  # omit for the user-wide default speed
    user_id: int


@app.post("/api/data/get_playback_speed")
async def api_get_playback_speed(data: GetPlaybackSpeed, cnx=Depends(get_database_connection),
                                 api_key: str = Depends(get_api_key_from_header)):
    """Return the user's playback speed (optionally per-podcast)."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if key_id == data.user_id or is_web_key:
        # Fix the parameter order to match the function definition
        is_youtube = False  # Add the is_youtube parameter
        playback_speed = database_functions.functions.get_playback_speed(
            cnx,  # Connection should be first
            database_type,  # Then database type
            data.user_id,  # Then user_id
            is_youtube,  # Then is_youtube parameter
            data.podcast_id  # Then optional podcast_id
        )
        return {"playback_speed": playback_speed}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only get metadata for yourself!")


@app.get("/api/data/generate_mfa_secret/{user_id}")
async def generate_mfa_secret(user_id: int, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Begin MFA enrollment: create a TOTP secret, stash it in temporary
    storage, and return it together with a provisioning QR code (SVG)."""
    # Perform API key validation and user authorization checks as before
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        logging.warning(f"Invalid API key: {api_key}")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key
    logging.info(f"Is web key: {is_web_key}")

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f"Key ID from API key: {key_id}")

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == user_id or is_web_key:
        user_details = database_functions.functions.get_user_details_id(cnx, database_type, user_id)
        if not user_details:
            raise HTTPException(status_code=404, detail="User not found")

        email = user_details['Email']
        secret = random_base32()  # Correctly generate a random base32 secret
        # Store the secret in temporary storage
        # NOTE(review): stored as a bare secret, while cleanup_temp_mfa_secrets
        # unpacks (secret, timestamp) pairs — the two formats disagree; confirm
        # the intended shape.
        temp_mfa_secrets[user_id] = secret
        totp = TOTP(secret)
        provisioning_uri = totp.provisioning_uri(name=email, issuer_name="Pinepods")

        # Generate QR code as SVG
        qr = qrcode.QRCode(
            version=1,
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=10,
            border=4,
        )
        qr.add_data(provisioning_uri)
        qr.make(fit=True)

        # Convert the QR code to an SVG string
        factory = qrcode.image.svg.SvgPathImage
        img = qr.make_image(fill_color="black", back_color="white", image_factory=factory)
        buffered = io.BytesIO()
        img.save(buffered)
        qr_code_svg = buffered.getvalue().decode("utf-8")
        logging.info(f"Generated MFA secret for user {user_id}")

        return {
            "secret": secret,
            "qr_code_svg": qr_code_svg  # Directly return the SVG string
        }
    else:
        logging.warning("Attempted to generate MFA secret for another user")
        raise HTTPException(status_code=403,
                            detail="You can only generate MFA secrets for yourself!")


class VerifyTempMFABody(BaseModel):
    user_id: int
    mfa_code: str


@app.post("/api/data/verify_temp_mfa")
async def verify_temp_mfa(body: VerifyTempMFABody, cnx=Depends(get_database_connection),
                          api_key: str = Depends(get_api_key_from_header)):
    """Verify the code for a pending MFA enrollment and persist the secret."""
    # Perform API key validation and user authorization checks as before
    logging.info(f"Verifying MFA code for user_id: {body.user_id} with code: {body.mfa_code}")

    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        logging.warning(f"Invalid API key: {api_key}")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key
    logging.info(f"Is web key: {is_web_key}")

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f"Key ID from API key: {key_id}")

    if key_id == body.user_id or is_web_key:
        secret = temp_mfa_secrets.get(body.user_id)
        if secret is None:
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                                detail="MFA setup not initiated or expired.")
        # NOTE(review): this branch is unreachable — `secret is None` was
        # already handled above, so the else/raise below can never fire.
        if secret:
            logging.info(f"Retrieved secret for user_id")
        else:
            logging.warning(f"No secret found for user_id")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="MFA setup not initiated or expired.")

        totp = TOTP(secret)
        if totp.verify(body.mfa_code):
            try:
                # Attempt to save the MFA secret to permanent storage
                success = \
database_functions.functions.save_mfa_secret(database_type, cnx, body.user_id, secret) - if success: - # Remove the temporary secret upon successful verification and storage - del temp_mfa_secrets[body.user_id] - logging.info(f"MFA secret successfully saved for user_id: {body.user_id}") - return {"verified": True} - else: - # Handle unsuccessful save attempt (e.g., database error) - logging.error("Failed to save MFA secret to database.") - logging.error(f"Failed to save MFA secret for user_id: {body.user_id}") - return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "Failed to save MFA secret. Please try again."}) - except Exception as e: - logging.error(f"Exception saving MFA secret: {e}") - return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - content={"message": "An error occurred. Please try again."}) - else: - return {"verified": False} - else: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to verify MFA for this user.") - -# Cleanup task for temp_mfa_secrets -async def cleanup_temp_mfa_secrets(): - while True: - # Wait for 1 hour before running cleanup - await asyncio.sleep(3600) - # Current timestamp - current_time = time.time() - # Iterate over the temp_mfa_secrets and remove entries older than 1 hour - for user_id, (secret, timestamp) in list(temp_mfa_secrets.items()): - if current_time - timestamp > 3600: - del temp_mfa_secrets[user_id] - logging.info("Cleanup task: Removed expired MFA setup entries.") - - -class MfaSecretData(BaseModel): - user_id: int - mfa_secret: str - - -@app.post("/api/data/save_mfa_secret") -async def api_save_mfa_secret(data: MfaSecretData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - logging.info(f"Received request to save MFA secret for user {data.user_id}") - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - 
logging.warning(f"Invalid API key: {api_key}") - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - logging.info(f"Is web key: {is_web_key}") - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - logging.info(f"Key ID from API key: {key_id}") - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - success = database_functions.functions.save_mfa_secret(database_type, cnx, data.user_id, data.mfa_secret) - if success: - logging.info("MFA secret saved successfully") - return {"status": "success"} - else: - logging.error("Failed to save MFA secret") - return {"status": "error"} - else: - logging.warning("Attempted to save MFA secret for another user") - raise HTTPException(status_code=403, - detail="You can only save MFA secrets for yourself!") - -@app.get("/api/data/check_mfa_enabled/{user_id}") -async def api_check_mfa_enabled(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to check mfa status for other users.") - logging.info(f"Database Type: {database_type}, Connection: {cnx}, User ID: {user_id}") - - is_enabled = 
database_functions.functions.check_mfa_enabled(database_type, cnx, user_id) - return {"mfa_enabled": is_enabled} - - -class VerifyMFABody(BaseModel): - user_id: int - mfa_code: str - - -@app.post("/api/data/verify_mfa") -async def api_verify_mfa(body: VerifyMFABody, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == body.user_id or is_web_key: - secret = database_functions.functions.get_mfa_secret(database_type, cnx, body.user_id) - - if secret is None: - return {"verified": False} - else: - totp = TOTP(secret) - verification_result = totp.verify(body.mfa_code) - return {"verified": verification_result} - else: - raise HTTPException(status_code=403, - detail="You can only verify your own login code!") - - -class UserIDBody(BaseModel): - user_id: int - - -@app.delete("/api/data/delete_mfa") -async def api_delete_mfa(body: UserIDBody, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if body.user_id != user_id_from_api_key: - raise 
HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - - result = database_functions.functions.delete_mfa_secret(database_type, cnx, body.user_id) - if result: - return {"deleted": result} - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - -# Model for request data -class TimeZoneInfo(BaseModel): - user_id: int - timezone: str - hour_pref: int - date_format: str - - -# FastAPI endpoint -@app.post("/api/data/setup_time_info") -async def setup_timezone_info(data: TimeZoneInfo, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if data.user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - - success = database_functions.functions.setup_timezone_info(database_type, cnx, data.user_id, data.timezone, - data.hour_pref, data.date_format) - if success: - return {"success": success} - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.get("/api/data/get_time_info") -async def get_time_info(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - 
elevated_access = await has_elevated_access(api_key, cnx) - - if not elevated_access: - # Get user ID from API key - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if user_id != user_id_from_api_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to access these user details") - timezone, hour_pref, date_format = database_functions.functions.get_time_info(database_type, cnx, user_id) - if timezone: - return {"timezone": timezone, "hour_pref": hour_pref, "date_format": date_format} - else: - raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found") - - -@app.get("/api/data/first_login_done/{user_id}") -async def first_login_done(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - first_login_status = database_functions.functions.first_login_done(database_type, cnx, user_id) - return {"FirstLogin": first_login_status} - else: - raise HTTPException(status_code=403, - detail="You can only run first login for yourself!") - -class SelectedEpisodesDelete(BaseModel): - selected_episodes: List[int] = Field(..., title="List of Episode IDs") - user_id: int = Field(..., title="User ID") - - -@app.post("/api/data/delete_selected_episodes") -async def delete_selected_episodes(data: SelectedEpisodesDelete, cnx=Depends(get_database_connection), - api_key: str = 
Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - if is_valid_key: - delete_status = database_functions.functions.delete_selected_episodes(cnx, database_type, data.selected_episodes, - data.user_id) - return {"status": delete_status} - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - else: - raise HTTPException(status_code=403, - detail="You can only delete your own selected episodes!") - -class SearchPodcastData(BaseModel): - search_term: str - user_id: int - - -@app.post("/api/data/search_data") -async def search_data(data: SearchPodcastData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if is_valid_key: - result = database_functions.functions.search_data(database_type, cnx, data.search_term, data.user_id) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - -class QueuePodData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False - -@app.post("/api/data/queue_pod") -async def queue_pod(data: QueuePodData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise 
HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - ep_status = database_functions.functions.check_queued( - database_type, cnx, data.episode_id, data.user_id, data.is_youtube - ) - if ep_status: - return {"data": f"{'Video' if data.is_youtube else 'Episode'} already in queue"} - else: - result = database_functions.functions.queue_pod( - database_type, cnx, data.episode_id, data.user_id, data.is_youtube - ) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail=f"You can only add {'videos' if data.is_youtube else 'episodes'} to your own queue!") - -class QueueRmData(BaseModel): - episode_id: int - user_id: int - is_youtube: bool = False - -@app.post("/api/data/remove_queued_pod") -async def remove_queued_pod(data: QueueRmData, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - result = database_functions.functions.remove_queued_pod( - database_type, cnx, data.episode_id, data.user_id, data.is_youtube - ) - return result - else: - raise HTTPException(status_code=403, - detail=f"You can only remove {'videos' if data.is_youtube else 'episodes'} for your own queue!") -# class QueuedEpisodesData(BaseModel): -# user_id: int - - -@app.get("/api/data/get_queued_episodes") -async def get_queued_episodes(user_id: int = Query(...), cnx=Depends(get_database_connection), - 
api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - result = database_functions.functions.get_queued_episodes(database_type, cnx, user_id) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only get episodes from your own queue!") - -class ReorderRequest(BaseModel): - episode_ids: List[int] - -@app.post("/api/data/reorder_queue") -async def reorder_queue(request: ReorderRequest, user_id: int = Query(...), cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - success = database_functions.functions.reorder_queued_episodes(database_type, cnx, user_id, request.episode_ids) - if success: - return {"message": "Queue reordered successfully"} - else: - raise HTTPException(status_code=500, detail="Failed to reorder the queue") - else: - raise HTTPException(status_code=403, detail="You can only reorder your own queue!") - 
-@app.get("/api/data/check_episode_in_db/{user_id}") -async def check_episode_in_db(user_id: int, episode_title: str = Query(...), episode_url: str = Query(...), cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - if database_functions.functions.id_from_api_key(cnx, database_type, api_key) != user_id: - raise HTTPException(status_code=403, detail="You can only check episodes for your own account") - - episode_exists = database_functions.functions.check_episode_exists(cnx, database_type, user_id, episode_title, episode_url) - return {"episode_in_db": episode_exists} - -@app.get("/api/data/get_pinepods_version") -async def get_pinepods_version(cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - result = database_functions.functions.get_pinepods_version() - return {"data": result} - -@app.post("/api/data/share_episode/{episode_id}") -async def share_episode(episode_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - import uuid - from datetime import datetime, timedelta - # Verify API key validity - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have the 
correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Generate the URL key and expiration date - url_key = str(uuid.uuid4()) # Generates a unique URL key - expiration_date = datetime.utcnow() + timedelta(days=60) # Expire in 60 days - - # Call database function to insert the shared episode entry - result = database_functions.functions.add_shared_episode(database_type, cnx, episode_id, url_key, expiration_date) - - if result: - return {"url_key": url_key} - else: - raise HTTPException(status_code=500, detail="Failed to share episode") - - -@app.get("/api/data/cleanup_tasks") -async def api_cleanup_tasks( - background_tasks: BackgroundTasks, - is_admin: bool = Depends(check_if_admin) -) -> Dict[str, str]: - """ - Endpoint to trigger cleanup of old PeopleEpisodes and expired SharedEpisodes - """ - background_tasks.add_task(cleanup_tasks) - return {"detail": "Cleanup tasks initiated."} - -def cleanup_tasks(): - """ - Background task to run database cleanup operations - """ - cnx = create_database_connection() - try: - database_functions.functions.cleanup_old_episodes(cnx, database_type) - except Exception as e: - print(f"Error during cleanup tasks: {str(e)}") - finally: - close_database_connection(cnx) - -@app.get("/api/data/update_playlists") -async def api_update_playlists( - background_tasks: BackgroundTasks, - is_admin: bool = Depends(check_if_admin) -) -> Dict[str, str]: - """ - Endpoint to trigger playlist updates for all playlists (system and user-defined) - """ - background_tasks.add_task(update_playlists_task) - return {"detail": "Playlist update initiated."} - -def update_playlists_task(): - """ - Background task to update all playlists - """ - print("Starting background playlist update task") - try: - cnx = create_database_connection() - try: - database_functions.functions.update_all_playlists(cnx, 
database_type) - print("Background playlist update task completed successfully") - except Exception as e: - print(f"Error in update_all_playlists: {str(e)}") - if hasattr(e, '__traceback__'): - import traceback - print(traceback.format_exc()) - finally: - close_database_connection(cnx) - except Exception as e: - print(f"Critical error in update_playlists_task: {str(e)}") - if hasattr(e, '__traceback__'): - import traceback - print(traceback.format_exc()) - - -class PlaylistCreate(BaseModel): - name: str - description: Optional[str] - podcast_ids: Optional[List[int]] - include_unplayed: bool = True - include_partially_played: bool = True - include_played: bool = False - play_progress_min: Optional[float] = None # Made optional - play_progress_max: Optional[float] = None # Made optional - time_filter_hours: Optional[int] = None # Made optional - min_duration: Optional[int] - max_duration: Optional[int] - sort_order: str = "date_desc" - group_by_podcast: bool = False - max_episodes: Optional[int] - user_id: int - icon_name: str = "ph-playlist" - -class PlaylistDelete(BaseModel): - user_id: int - playlist_id: int - -class PlaylistsGet(BaseModel): - user_id: int - -@app.post("/api/data/create_playlist") -async def api_create_playlist( - data: PlaylistCreate, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -) -> Dict[str, Any]: - """ - Create a new custom playlist - """ - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - try: - playlist_id = database_functions.functions.create_playlist( - cnx, - database_type, - data - ) - return {"detail": "Playlist created 
successfully", "playlist_id": playlist_id} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - else: - raise HTTPException( - status_code=403, - detail="You can only create playlists for yourself!" - ) - -@app.delete("/api/data/delete_playlist") -async def api_delete_playlist( - data: PlaylistDelete, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -) -> Dict[str, str]: - """ - Delete a playlist - """ - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - try: - database_functions.functions.delete_playlist( - cnx, - database_type, - data.user_id, - data.playlist_id - ) - return {"detail": "Playlist deleted successfully"} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - else: - raise HTTPException( - status_code=403, - detail="You can only delete your own playlists!" 
- ) - -@app.get("/api/data/get_playlists") -async def api_get_playlists( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -) -> Dict[str, List[Dict[str, Any]]]: - """ - Get all playlists accessible to the user - """ - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == user_id or is_web_key: - try: - playlists = database_functions.functions.get_playlists( - cnx, - database_type, - user_id - ) - return {"playlists": playlists} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - else: - raise HTTPException( - status_code=403, - detail="You can only view your own playlists!" - ) - - -@app.get("/api/data/get_playlist_episodes") -async def api_get_playlist_episodes( - user_id: int, - playlist_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -) -> Dict[str, Any]: - """ - Get all episodes in a playlist - """ - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - try: - return database_functions.functions.get_playlist_episodes( - cnx, - database_type, - user_id, - playlist_id - ) - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@app.get("/api/data/episode_by_url/{url_key}") -async def get_episode_by_url_key(url_key: str, cnx=Depends(get_database_connection)): - # Find the episode ID associated with the URL key - print('running inside ep by url') - episode_id = 
database_functions.functions.get_episode_id_by_url_key(database_type, cnx, url_key) - print(f'outside dunc {episode_id}') - if episode_id is None: - raise HTTPException(status_code=404, detail="Invalid or expired URL key") - - # Now retrieve the episode metadata using the episode_id - try: - episode_data = database_functions.functions.get_episode_metadata_id(database_type, cnx, episode_id) # UserID is None because we are bypassing normal user auth for shared links - return {"episode": episode_data} - except ValueError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -class LoginInitiateData(BaseModel): - user_id: int - nextcloud_url: str - -@app.post("/api/data/initiate_nextcloud_login") -async def initiate_nextcloud_login(data: LoginInitiateData, cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)): - import requests - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - login_url = f"{data.nextcloud_url}/index.php/login/v2" - try: - response = requests.post(login_url) - response.raise_for_status() # This will raise an HTTPError for bad responses - return response.json() - except requests.HTTPError as http_err: - # Log the detailed error - detail = f"Nextcloud login failed with status code {response.status_code}: {response.text}" - raise HTTPException(status_code=response.status_code, detail=detail) - except requests.RequestException as req_err: - # General request exception handling (e.g., network issues) - raise 
HTTPException(status_code=500, detail=f"Failed to reach Nextcloud server: {str(req_err)}") - else: - raise HTTPException(status_code=403, detail="You are not authorized to initiate this action.") - -class GpodderAuthRequest(BaseModel): - gpodder_url: str - gpodder_username: str - gpodder_password: str - -@app.post("/api/data/verify_gpodder_auth") -async def verify_gpodder_auth(request: GpodderAuthRequest): - from requests.auth import HTTPBasicAuth - auth = HTTPBasicAuth(request.gpodder_username, request.gpodder_password) - async with httpx.AsyncClient() as client: - try: - response = await client.post(f"{request.gpodder_url}/api/2/auth/{request.gpodder_username}/login.json", auth=auth) - response.raise_for_status() # Will raise an httpx.HTTPStatusError for 4XX/5XX responses - if response.status_code == 200: - return {"status": "success", "message": "Logged in!"} - else: - raise HTTPException(status_code=response.status_code, detail="Authentication failed") - except httpx.HTTPStatusError as e: - raise HTTPException(status_code=e.response.status_code, detail="Authentication failed") - except Exception as e: - raise HTTPException(status_code=500, detail="Internal Server Error") - -class GpodderSettings(BaseModel): - user_id: int - gpodder_url: str - gpodder_token: str - -@app.post("/api/data/add_gpodder_settings") -async def add_gpodder_settings(data: GpodderSettings, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == 
data.user_id or is_web_key: - result = database_functions.functions.add_gpodder_settings(database_type, cnx, data.user_id, data.gpodder_url, data.gpodder_token) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only add your own gpodder data!") - -class GpodderSettings(BaseModel): - user_id: int - gpodder_url: str - gpodder_username: str - gpodder_password: str - - -@app.post("/api/data/add_gpodder_server") -async def add_gpodder_server( - data: GpodderSettings, - background_tasks: BackgroundTasks, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - is_web_key = api_key == base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - if key_id == data.user_id or is_web_key: - # First add the gpodder server - result = database_functions.functions.add_gpodder_server( - database_type, - cnx, - data.user_id, - data.gpodder_url, - data.gpodder_username, - data.gpodder_password - ) - - # Get the user's gpodder settings - similar to what refresh_nextcloud_subscription does - if database_type == "postgresql": - cursor = cnx.cursor() - cursor.execute(''' - SELECT "userid", "gpodderurl", "gpoddertoken", "gpodderloginname" - FROM "Users" - WHERE "userid" = %s AND "gpodderurl" IS NOT NULL - ''', (data.user_id,)) - user = cursor.fetchone() - else: - cursor = cnx.cursor() - cursor.execute(''' - SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName - FROM Users - WHERE UserID = %s AND GpodderUrl IS NOT NULL - ''', (data.user_id,)) - user = cursor.fetchone() - - if user: - if isinstance(user, dict): - if database_type == "postgresql": - gpodder_url = user["gpodderurl"] - gpodder_token = user["gpoddertoken"] - 
gpodder_login = user["gpodderloginname"] - else: - gpodder_url = user["GpodderUrl"] - gpodder_token = user["GpodderToken"] - gpodder_login = user["GpodderLoginName"] - else: - _, gpodder_url, gpodder_token, gpodder_login = user - - # Add the refresh task for just this user - background_tasks.add_task( - refresh_nextcloud_subscription_for_user, - database_type, - data.user_id, - gpodder_url, - gpodder_token, - gpodder_login - ) - - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only add your own gpodder data!") - - -class RemoveGpodderSettings(BaseModel): - user_id: int - -@app.post("/api/data/remove_gpodder_settings") -async def remove_gpodder_settings(data: RemoveGpodderSettings, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == data.user_id or is_web_key: - result = database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only remove your own gpodder data!") - -@app.get("/api/data/check_gpodder_settings/{user_id}") -async def check_gpodder_settings(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have 
correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - result = database_functions.functions.check_gpodder_settings(database_type, cnx, user_id) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only remove your own gpodder data!") - -@app.get("/api/data/get_gpodder_settings/{user_id}") -async def get_gpodder_settings(user_id: int, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException(status_code=403, - detail="Your API key is either invalid or does not have correct permission") - - # Check if the provided API key is the web key - is_web_key = api_key == base_webkey.web_key - - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Allow the action if the API key belongs to the user or it's the web API key - if key_id == user_id or is_web_key: - result = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id) - return {"data": result} - else: - raise HTTPException(status_code=403, - detail="You can only remove your own gpodder data!") - - -class NextcloudAuthRequest(BaseModel): - user_id: int - token: str - poll_endpoint: HttpUrl - nextcloud_url: HttpUrl - -@app.post("/api/data/add_nextcloud_server") -async def add_nextcloud_server(background_tasks: BackgroundTasks, data: NextcloudAuthRequest, cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header)): - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - - if not is_valid_key: - raise HTTPException(status_code=403, - 
                            detail="Your API key is either invalid or does not have correct permission")

    # NOTE(review): the lines above/below up to the first `async def` are the
    # tail of an endpoint whose definition starts before this chunk.
    elevated_access = await has_elevated_access(api_key, cnx)

    if not elevated_access:
        # Get user ID from API key
        user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

        if data.user_id != user_id_from_api_key:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access these user details")

    # Reset gPodder settings to default
    database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id)

    # Add the polling task to the background tasks
    background_tasks.add_task(poll_for_auth_completion_background, data, database_type)

    # Return 200 status code before starting to poll
    return {"status": "polling"}

async def poll_for_auth_completion_background(data: NextcloudAuthRequest, database_type):
    """Background task: wait for the Nextcloud login flow to finish, then save
    the issued credentials as the user's gpodder settings.

    Opens its own database connection (it runs outside the request lifecycle)
    and always closes it. Failures are logged, not raised.
    """
    # Create a new database connection
    cnx = create_database_connection()

    try:
        credentials = await poll_for_auth_completion(data.poll_endpoint, data.token)
        if credentials:
            # NOTE(review): these logs include the token/credentials — consider
            # redacting in production.
            logging.info(f"Nextcloud authentication successful: {credentials}")
            logging.info(f"Adding Nextcloud settings for user {data.user_id}")
            logging.info(f"Database Type: {database_type}, Connection: {cnx}, User ID: {data.user_id}")
            logging.info(f"Nextcloud URL: {data.nextcloud_url}, Token: {data.token}")
            result = database_functions.functions.add_gpodder_settings(database_type, cnx, data.user_id, str(data.nextcloud_url), credentials["appPassword"], credentials["loginName"], "nextcloud")
            if not result:
                logging.error("User not found")
        else:
            logging.error("Nextcloud authentication failed.")
    finally:
        # Close the database connection
        cnx.close()

# Adjusted to use httpx for async HTTP requests
async def poll_for_auth_completion(endpoint: HttpUrl, token: str):
    """Poll the Nextcloud login-flow v2 poll endpoint until credentials exist.

    Returns the credentials dict on HTTP 200. A 404 means "auth not finished
    yet": sleep 5s (non-blocking) and retry. Any other status raises a 500;
    exceeding the 20-minute window raises a 408. Connection timeouts retry.
    """
    payload = {"token": token}
    timeout = 20 * 60  # 20 minutes timeout for polling
    async with httpx.AsyncClient() as client:
        start_time = asyncio.get_event_loop().time()
        while asyncio.get_event_loop().time() - start_time < timeout:
            try:
                response = await client.post(str(endpoint), json=payload, headers={"Content-Type": "application/json"})
            except httpx.ConnectTimeout:
                # Retry immediately on connect timeout (no backoff here).
                logging.info("Connection timed out, retrying...")
                logging.info(f"endpoint: {endpoint}, token: {token}")
                continue
            if response.status_code == 200:
                credentials = response.json()
                logging.info(f"Authentication successful: {credentials}")
                return credentials
            elif response.status_code == 404:
                await asyncio.sleep(5)  # Non-blocking sleep
            else:
                logging.info(f"Polling failed with status code {response.status_code}")
                raise HTTPException(status_code=500, detail="Polling for Nextcloud authentication failed.")
        raise HTTPException(status_code=408, detail="Nextcloud authentication request timed out.")

@app.get("/api/data/refresh_nextcloud_subscriptions")
async def refresh_nextcloud_subscription(background_tasks: BackgroundTasks, is_admin: bool = Depends(check_if_admin), api_key: str = Depends(get_api_key_from_header)):
    """Admin-gated endpoint: queue a subscription refresh for every user with
    Nextcloud/gpodder settings. Returns immediately; refreshes run in
    background tasks, one per user.
    """
    cnx = create_database_connection()
    try:
        users = database_functions.functions.get_nextcloud_users(database_type, cnx)
    finally:
        close_database_connection(cnx)
    for user in users:
        # Handle both dictionary and tuple cases
        # (row shape depends on DB driver: dict rows use lowercase keys on
        # PostgreSQL and CamelCase on MySQL).
        if isinstance(user, dict):
            if database_type == "postgresql":
                user_id = user["userid"]
                gpodder_url = user["gpodderurl"]
                gpodder_token = user["gpoddertoken"]
                gpodder_login = user["gpodderloginname"]
                sync_type = user.get("pod_sync_type", "None")
            else:
                user_id = user["UserID"]
                gpodder_url = user["GpodderUrl"]
                gpodder_token = user["GpodderToken"]
                gpodder_login = user["GpodderLoginName"]
                sync_type = user.get("Pod_Sync_Type", "None")
        else:  # assuming tuple
            # Now handle 5 values instead of 4
            if len(user) >= 5:
                user_id, gpodder_url, gpodder_token, gpodder_login, sync_type = user
            else:
                user_id, gpodder_url, gpodder_token, gpodder_login = user
                sync_type = "None"

        # Pass the sync_type to the refresh function
        background_tasks.add_task(
            refresh_nextcloud_subscription_for_user,
            database_type,
            user_id,
            gpodder_url,
            gpodder_token,
            gpodder_login,
            sync_type  # Add this parameter
        )
    return {"status": "success", "message": "Nextcloud subscriptions refresh initiated."}

def refresh_nextcloud_subscription_for_user(database_type, user_id, gpodder_url, gpodder_token, gpodder_login, sync_type=None):
    """Synchronous background worker: refresh one user's podcast subscriptions
    via the appropriate sync backend.

    Dispatch: "nextcloud" -> refresh_nextcloud_subscription; "gpodder"/"both"/
    "external" -> refresh_gpodder_subscription (with a default device); any
    other sync_type is a no-op returning False. Manages its own DB connection.
    """
    cnx = create_database_connection()
    try:
        # If sync_type wasn't passed, try to get it from the database
        if not sync_type:
            sync_type = database_functions.functions.get_gpodder_type(cnx, database_type, user_id)

        # Determine if this is internal based on URL
        is_internal = gpodder_url == "http://localhost:8042"
        print(f"Using {'internal' if is_internal else 'external'} gpodder API for user {user_id}")

        # Special handling for nextcloud sync
        if sync_type == "nextcloud":
            print(f"Using nextcloud gpodder API for user {user_id}")
            success = database_functions.functions.refresh_nextcloud_subscription(
                database_type,
                cnx,
                user_id,
                gpodder_url,
                gpodder_token,
                gpodder_login,
                sync_type
            )
            return success
        # For all other GPodder sync types, use the standard refresh function
        elif sync_type in ["gpodder", "both", "external"]:
            # Get default device ID
            device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id)

            # Get device name if we have a device ID
            device_name = None
            if device_id:
                cursor = cnx.cursor()
                # PostgreSQL table names are quoted (case-sensitive identifiers).
                if database_type == "postgresql":
                    query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s'
                else:
                    query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s"

                cursor.execute(query, (device_id,))
                result = cursor.fetchone()
                cursor.close()

                if result:
                    device_name = result[0] if isinstance(result, tuple) else result["devicename"]

            # Determine if this is a remote sync
            is_remote = not is_internal and sync_type in ["external", "both"]

            success = database_functions.functions.refresh_gpodder_subscription(
                database_type,
                cnx,
                user_id,
                gpodder_url,
                gpodder_token,
                gpodder_login,
                sync_type,
                device_id,
                device_name,
                is_remote
            )
            return success
        else:
            print(f"GPodder sync not enabled for user {user_id} (sync_type: {sync_type})")
            return False
    finally:
        close_database_connection(cnx)

class RemoveSyncRequest(BaseModel):
    # ID of the user whose sync settings are removed.
    user_id: int

@app.delete("/api/data/remove_podcast_sync")
async def remove_podcast_sync(data: RemoveSyncRequest, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Remove the caller's (or, with elevated access, any user's) gpodder sync
    settings. 403 if the key is invalid or targets another user.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the user has permission to modify this user's data
    elevated_access = await has_elevated_access(api_key, cnx)
    if not elevated_access:
        user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if data.user_id != user_id_from_api_key:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to modify these user settings")

    # Remove the sync settings
    database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id)

    return {"success": True, "message": "Podcast sync settings removed successfully"}

def check_valid_feed(feed_url: str, username: Optional[str] = None, password: Optional[str] = None):
    """
    Check if the provided URL points to a valid podcast feed.
    Uses both direct content-type checking and feedparser validation.

    Args:
        feed_url: URL of the podcast feed
        username: Optional username for authenticated feeds
        password: Optional password for authenticated feeds

    Returns:
        feedparser.FeedParserDict: The parsed feed if valid

    Raises:
        ValueError: If the feed is invalid or inaccessible
    """
    import feedparser
    import requests
    from requests.auth import HTTPBasicAuth
    from typing import Optional

    # Common podcast feed content types
    VALID_CONTENT_TYPES = [
        'application/xml',
        'text/xml',
        'application/rss+xml',
        'application/atom+xml',
        'application/rdf+xml',
    ]

    def is_valid_content_type(content_type: str) -> bool:
        """Check if the content type indicates XML content."""
        content_type = content_type.lower().split(';')[0].strip()
        return any(valid_type in content_type for valid_type in VALID_CONTENT_TYPES) or 'xml' in content_type

    # Use requests to fetch the feed content
    try:
        # Set multiple user agents and accept headers to improve compatibility
        headers = {
            'User-Agent': 'Mozilla/5.0 (compatible; PodcastApp/1.0; +https://example.com)',
            'Accept': 'application/rss+xml, application/atom+xml, application/xml, text/xml, */*'
        }

        # Handle authentication if provided
        auth = HTTPBasicAuth(username, password) if username and password else None

        # Make the request with a timeout
        response = requests.get(
            feed_url,
            headers=headers,
            auth=auth,
            timeout=10,
            allow_redirects=True
        )
        response.raise_for_status()

        # Get content type, handling cases where it might not be present
        content_type = response.headers.get('Content-Type', '').lower()

        # Special handling for feeds that don't properly set content type
        if not is_valid_content_type(content_type):
            # Try to parse it anyway - some feeds might be valid despite wrong content type
            feed_content = response.content
            parsed_feed = feedparser.parse(feed_content)

            # If we can parse it and it has required elements, accept it despite content type
            if (parsed_feed.get('version') and
                    'title' in parsed_feed.feed and
                    'link' in parsed_feed.feed):
                return parsed_feed

            # If we can't parse it, then it's probably actually invalid
            raise ValueError(
                f"Unexpected Content-Type: {content_type}. "
                "The feed URL must point to an XML feed file."
            )

        feed_content = response.content

    except requests.RequestException as e:
        raise ValueError(f"Error fetching the feed: {str(e)}")

    # Parse the feed content using feedparser
    parsed_feed = feedparser.parse(feed_content)

    # Check for feedparser errors
    if parsed_feed.get('bozo') == 1:
        exception = parsed_feed.get('bozo_exception')
        if exception:
            raise ValueError(f"Feed parsing error: {str(exception)}")

    # Validate the parsed feed has required elements
    if not parsed_feed.get('version'):
        raise ValueError("Invalid podcast feed URL or content: Could not determine feed version.")

    required_attributes = ['title', 'link']
    missing_attributes = [attr for attr in required_attributes if attr not in parsed_feed.feed]

    if missing_attributes:
        raise ValueError(
            f"Feed missing required attributes: {', '.join(missing_attributes)}. "
            "The URL must point to a valid podcast feed."
        )

    # Check for podcast-specific elements
    has_items = len(parsed_feed.entries) > 0
    if not has_items:
        raise ValueError("Feed contains no episodes.")

    return parsed_feed



class CustomPodcast(BaseModel):
    # Feed URL plus optional HTTP basic-auth credentials for protected feeds.
    feed_url: str
    user_id: int
    username: Optional[str] = None
    password: Optional[str] = None

@app.post("/api/data/add_custom_podcast")
async def add_custom_pod(data: CustomPodcast, cnx=Depends(get_database_connection),
                         api_key: str = Depends(get_api_key_from_header)):
    """Validate a feed URL via check_valid_feed, then add it as a custom
    podcast for the user. Callers may only add podcasts for themselves unless
    using the web key. 400 on invalid feed, 500 on processing failure.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == data.user_id or is_web_key:
        try:
            parsed_feed = check_valid_feed(data.feed_url, data.username, data.password)
        except ValueError as e:
            logger.error(f"Failed to parse: {str(e)}")
            raise HTTPException(status_code=400, detail=str(e))

        # Assuming the rest of the code processes the podcast correctly
        try:
            podcast_id = database_functions.functions.add_custom_podcast(database_type, cnx, data.feed_url, data.user_id, data.username, data.password)
            print('custom done')
            podcast_details = database_functions.functions.get_podcast_details(database_type, cnx, data.user_id, podcast_id)
            return {"data": podcast_details}
        except Exception as e:
            logger.error(f"Failed to process the podcast: {str(e)}")
            raise HTTPException(status_code=500, detail=f"Failed to process the podcast: {str(e)}")
    else:
        raise HTTPException(status_code=403,
                            detail="You can only add podcasts for yourself!")

class PersonEpisodesRequest(BaseModel):
    # NOTE(review): appears unused by the path-parameter endpoint that follows;
    # possibly a leftover request model — confirm before removing.
    user_id: int
    person_id: int
@app.get("/api/data/person/episodes/{user_id}/{person_id}")
async def api_return_person_episodes(
    user_id: int,
    person_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Return episodes gathered for a subscribed person; empty list if none.
    Callers may only view their own subscriptions unless using the web key.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == user_id or is_web_key:
        episodes = database_functions.functions.return_person_episodes(database_type, cnx, user_id, person_id)
        if episodes is None:
            episodes = []
        return {"episodes": episodes}
    else:
        raise HTTPException(
            status_code=403,
            detail="You can only view episodes for your own subscriptions!"
        )

@app.get("/api/data/refresh_hosts")
async def refresh_all_hosts(
    background_tasks: BackgroundTasks,
    cnx=Depends(get_database_connection), is_admin: bool = Depends(check_if_admin),
    api_key: str = Depends(get_api_key_from_header),
):
    """Refresh episodes for all subscribed hosts"""
    # Verify it's the system/web API key
    if api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="This endpoint requires system API key")
    try:
        cursor = cnx.cursor()
        # Get all unique people that users are subscribed to
        if database_type == "postgresql":
            cursor.execute("""
                SELECT DISTINCT p.PersonID, p.Name, p.UserID
                FROM "People" p
            """)
        else:  # MySQL
            cursor.execute("""
                SELECT DISTINCT p.PersonID, p.Name, p.UserID
                FROM People p
            """)

        subscribed_hosts = cursor.fetchall()
        if not subscribed_hosts:
            return {"message": "No subscribed hosts found"}

        # Process each host in the background
        hosts_to_process = []
        for host in subscribed_hosts:
            # Handle both tuple and dict result formats
            if isinstance(host, dict):
                person_id = host.get('PersonID', host.get('personid'))
                person_name = host.get('Name', host.get('name'))
                user_id = host.get('UserID', host.get('userid'))
            else:  # tuple
                person_id, person_name, user_id = host

            hosts_to_process.append(person_name)
            background_tasks.add_task(
                process_person_subscription_task,
                user_id,
                person_id,
                person_name
            )

        return {
            "message": f"Refresh initiated for {len(subscribed_hosts)} hosts",
            "hosts": hosts_to_process
        }
    except Exception as e:
        logging.error(f"Error refreshing hosts: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))

class PersonSubscribeRequest(BaseModel):
    # Display name, image URL, and originating podcast for the person.
    person_name: str
    person_img: str
    podcast_id: int

@app.post("/api/data/person/subscribe/{user_id}/{person_id}")
async def api_subscribe_to_person(
    user_id: int,
    person_id: int,
    request: PersonSubscribeRequest,
    background_tasks: BackgroundTasks,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Subscribe the user to a person/host and kick off a background task that
    gathers that person's shows. Returns the DB-assigned person ID (which may
    differ from the path's person_id).
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")

    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == user_id or is_web_key:
        success, db_person_id = database_functions.functions.subscribe_to_person(
            cnx,
            database_type,
            user_id,
            person_id,
            request.person_name,
            request.person_img,
            request.podcast_id
        )

        if success:
            # Add background task to process the subscription using the actual PersonID
            background_tasks.add_task(
                process_person_subscription_task,
                user_id,
                db_person_id,  # Use the actual PersonID from the database
                request.person_name
            )
            return {
                "message": "Successfully subscribed to person",
                "person_id": db_person_id  # Return the actual person ID
            }
        else:
            raise HTTPException(status_code=400, detail="Failed to subscribe to person")
    else:
        raise HTTPException(status_code=403, detail="You can only subscribe for yourself!")

class UniqueShow(TypedDict):
    # (title, feed_url, feed_id) identity for a show; used conceptually by the
    # processed_shows set below.
    title: str
    feed_url: str
    feed_id: int

def process_person_subscription_task(
    user_id: int,
    person_id: int,
    person_name: str
) -> None:
    """Regular synchronous task for processing person subscription"""
    # Runs the async gatherer in a fresh event loop (this is a sync background
    # task), then triggers a best-effort server refresh.
    cnx = create_database_connection()
    try:
        # Run the async function in a new event loop
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        loop.run_until_complete(
            process_person_subscription(user_id, person_id, person_name, cnx)
        )
        loop.close()
        # After successful person subscription processing, trigger a server refresh
        print("Person subscription processed, initiating server refresh...")
        try:
            refresh_pods_task()
            print("Server refresh completed successfully")
        except Exception as refresh_error:
            print(f"Error during server refresh: {refresh_error}")
            # Don't raise the error here - we don't want to fail the whole operation
            # if just the refresh fails
            pass
    except Exception as e:
        print(f"Error in process_person_subscription_task: {e}")
        raise
    finally:
        close_database_connection(cnx)

async def process_person_subscription(
    user_id: int,
    person_id: int,
    person_name: str,
    cnx
) -> None:
    """Async function to process person subscription and gather their shows"""
    # Gathers shows from two sources (podpeople host search and the podcast
    # index person search), dedupes them, ensures each exists as a podcast
    # (per-user or system-owned), then records episodes for the person.
    print(f"Starting refresh for host: {person_name} (ID: {person_id})")
    try:
        # Set of unique shows (title, feed_url, feed_id)
        processed_shows: Set[Tuple[str, str, int]] = set()

        # 1. Get podcasts from podpeople
        async with httpx.AsyncClient(timeout=30.0) as client:
            try:
                podpeople_response = await client.get(
                    f"{people_url}/api/hostsearch",
                    params={"name": person_name}
                )
                podpeople_response.raise_for_status()
                podpeople_data = podpeople_response.json()

                # Check if we got valid data
                if podpeople_data and podpeople_data.get("success"):
                    for podcast in podpeople_data.get("podcasts", []):
                        processed_shows.add((
                            podcast['title'],
                            podcast['feed_url'],
                            podcast['id']
                        ))
            except Exception as e:
                print(f"Error getting data from podpeople: {str(e)}")
                # Continue execution even if podpeople lookup fails
                pass

        # 2. Get podcasts from podcast index
        print(f"API URL configured as: {api_url}")
        async with httpx.AsyncClient(timeout=30.0) as client:
            try:
                index_response = await client.get(
                    f"{api_url}",
                    params={
                        "query": person_name,
                        "index": "person",
                        "search_type": "person"
                    }
                )
                index_response.raise_for_status()
                index_data = index_response.json()

                if index_data and "items" in index_data:
                    for episode in index_data["items"]:
                        # Only accept items with a complete feed identity.
                        if all(field is not None for field in [episode.get("feedTitle"), episode.get("feedUrl"), episode.get("feedId")]):
                            processed_shows.add((
                                episode["feedTitle"],
                                episode["feedUrl"],
                                episode["feedId"]
                            ))
            except Exception as e:
                print(f"Error getting data from podcast index: {str(e)}")
                # Continue execution even if podcast index lookup fails
                pass

        # Only continue if we found any shows
        if not processed_shows:
            print(f"No shows found for person: {person_name}")
            return

        # 3. Process each unique show
        for title, feed_url, feed_id in processed_shows:
            try:
                # First check if podcast exists for user
                user_podcast_id = database_functions.functions.get_podcast_id(
                    database_type,
                    cnx,
                    user_id,
                    feed_url,
                    title
                )

                # Get podcast details and add as system podcast
                # NOTE(review): this fetch is repeated below when the system
                # podcast is missing — the first call's result is unused on
                # that path.
                podcast_values = database_functions.app_functions.get_podcast_values(
                    feed_url,
                    1,  # System UserID
                    None,
                    None,
                    False
                )

                if not user_podcast_id:
                    # Check if system podcast exists (system UserID = 1)
                    system_podcast_id = database_functions.functions.get_podcast_id(
                        database_type,
                        cnx,
                        1,  # System UserID
                        feed_url,
                        title
                    )

                    if system_podcast_id is None:
                        # If not found for system, add as a new system podcast
                        podcast_values = database_functions.app_functions.get_podcast_values(
                            feed_url,
                            1,  # System UserID
                            None,
                            None,
                            False
                        )
                        success = database_functions.functions.add_person_podcast(
                            cnx,
                            database_type,
                            podcast_values,
                            1  # System UserID
                        )
                        if success:
                            # Get the newly created podcast ID
                            system_podcast_id = database_functions.functions.get_podcast_id(
                                database_type,
                                cnx,
                                1,  # System UserID
                                feed_url,
                                title
                            )
                    podcast_id = system_podcast_id
                else:
                    podcast_id = user_podcast_id

                print(f"Using podcast: ID={podcast_id}, Title={title}")
                # 4. Add episodes to PeopleEpisodes
                database_functions.functions.add_people_episodes(
                    cnx,
                    database_type,
                    person_id=person_id,
                    podcast_id=podcast_id,
                    feed_url=feed_url,
                )

            except Exception as e:
                logging.error(f"Error processing show {title}: {str(e)}")
                continue

    except Exception as e:
        logging.error(f"Error processing person subscription: {str(e)}")
        raise

class UnsubscribeRequest(BaseModel):
    # Name of the person being unsubscribed from.
    person_name: str

@app.delete("/api/data/person/unsubscribe/{user_id}/{person_id}")
async def api_unsubscribe_from_person(
    user_id: int,
    person_id: int,
    request: UnsubscribeRequest,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Unsubscribe the user from a person. Self-only unless using the web key."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if key_id == user_id or is_web_key:
        success = database_functions.functions.unsubscribe_from_person(cnx, database_type, user_id, person_id, request.person_name)
        if success:
            return {"message": "Successfully unsubscribed from person"}
        else:
            raise HTTPException(status_code=400, detail="Failed to unsubscribe from person")
    else:
        raise HTTPException(status_code=403, detail="You can only unsubscribe for yourself!")

@app.get("/api/data/person/subscriptions/{user_id}")
async def api_get_person_subscriptions(
    user_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """List the user's person subscriptions. Self-only unless using the web key."""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")

    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == user_id or is_web_key:
        subscriptions = database_functions.functions.get_person_subscriptions(cnx, database_type, user_id)
        return {"subscriptions": subscriptions}
    else:
        raise HTTPException(status_code=403, detail="You can only view your own subscriptions!")


@app.get("/api/data/stream/{episode_id}")
async def stream_episode(
    episode_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Query(..., alias='api_key'),
    user_id: int = Query(..., alias='user_id'),
    source_type: str = Query(None, alias='type')
):
    """Stream a downloaded episode's media file. Accepts a regular API key or
    an RSS key (query params, since media players can't send headers); RSS keys
    may be scoped to specific podcast IDs (-1 or empty means universal).
    ?type=youtube selects the YouTube video location lookup.
    """
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if not key_id and not is_web_key:
        # Fall back to RSS-key auth when the key is not a normal API key.
        rss_key = database_functions.functions.get_rss_key_if_valid(cnx, database_type, api_key)
        if not rss_key:
            raise HTTPException(status_code=403, detail="Invalid API key")
        key_id = rss_key.get('user_id')
        universal_key = (not rss_key.get('podcast_ids') or len(rss_key.get('podcast_ids')) == 0 or -1 in rss_key.get('podcast_ids'))
        if not universal_key and not database_functions.functions.validate_episode_access(cnx, database_type, episode_id, rss_key.get('podcast_ids')):
            raise HTTPException(status_code=403, detail="You do not have permission to access this episode")

    if key_id == user_id or is_web_key:
        # Choose which lookup to use based on source_type
        if source_type == "youtube":
            file_path = database_functions.functions.get_youtube_video_location(cnx, database_type, episode_id, user_id)
            print(f'file path in if source youtube {file_path}')
        else:
            file_path = database_functions.functions.get_download_location(cnx, database_type, episode_id, user_id)
            print(f'file path in if source else {file_path}')

        if file_path:
            # Don't set filename to allow streaming instead of forced download
            return FileResponse(path=file_path, media_type='audio/mpeg')
        else:
            raise HTTPException(status_code=404, detail="Episode not found or not downloaded")
    else:
        raise HTTPException(status_code=403, detail="You do not have permission to access this episode")

class UpdateGpodderSyncRequest(BaseModel):
    # True to enable internal gpodder sync, False to disable it.
    enabled: bool

@app.post("/api/data/gpodder/toggle")
async def toggle_gpodder_sync(
    request: UpdateGpodderSyncRequest,
    background_tasks: BackgroundTasks,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Enable or disable gpodder sync for the current user"""
    # On enable: set internal sync, read back settings, and queue a background
    # subscription refresh against the internal API (localhost:8042).
    # NOTE(review): the broad `except Exception` below also catches the
    # HTTPExceptions raised inside the try (404/500) and re-raises them as a
    # generic 500 — confirm whether that is intended.
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )
    # Get the user ID from the API key
    user_id_result = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    print(f"User ID result: {user_id_result}")
    # id_from_api_key may return a dict, tuple, or scalar depending on driver.
    if isinstance(user_id_result, dict):
        user_id = user_id_result.get('userid')
    else:
        user_id = user_id_result[0] if isinstance(user_id_result, tuple) else user_id_result
    if not user_id:
        raise HTTPException(status_code=403, detail="Invalid API key")
    try:
        print(f"Request to toggle gpodder sync: {request.enabled}")
        user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
        if not user_data:
            raise HTTPException(status_code=404, detail="User not found")
        # Get initial state
        current_sync_type = user_data["sync_type"]
        print(f"Current sync type: {current_sync_type}")
        device_info = None
        if request.enabled:
            # Enable gpodder sync
            result = database_functions.functions.set_gpodder_internal_sync(cnx, database_type, user_id)
            if not result:
                raise HTTPException(status_code=500, detail="Failed to enable gpodder sync")
            device_info = result

            # Get required parameters for refresh_gpodder_subscription
            gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id)
            gpodder_token = gpodder_settings.get("gpoddertoken", "")
            gpodder_login = gpodder_settings.get("gpodderloginname", "")
            print(gpodder_settings)

            # Get the updated sync type after enabling
            updated_user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
            updated_sync_type = updated_user_data["sync_type"]
            # gpodder_login = gpodder_settings.get("gpodderloginname", "")
            # gpodder_token = gpodder_settings.get("gpoddertoken", "")
            device_id = device_info.get("device_id") if device_info else None
            device_name = device_info.get("device_name") if device_info else None

            background_tasks.add_task(
                refresh_gpodder_subscription_for_background,  # Use the wrapper function
                database_type,
                user_id,
                'http://localhost:8042',
                gpodder_token,
                gpodder_login,
                updated_sync_type,
                device_id,
                device_name,
                False  # is_remote
            )
            print(f"Added background task to sync gpodder for user: {user_id}")
        else:
            # Disable gpodder sync
            success = database_functions.functions.disable_gpodder_internal_sync(cnx, database_type, user_id)
            if not success:
                raise HTTPException(status_code=500, detail="Failed to disable gpodder sync")
        # Get updated state after changes
        updated_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
        new_sync_type = updated_data["sync_type"]
        print(f"Updated sync type: {new_sync_type}")
        response = {
            "sync_type": new_sync_type,
            "gpodder_enabled": new_sync_type in ["gpodder", "both"],
            "external_enabled": new_sync_type in ["external", "both"],
            "external_url": updated_data.get("gpodder_url") if new_sync_type in ["external", "both"] else None,
            "api_url": "http://localhost:8042" if new_sync_type in ["gpodder", "both"] else None
        }
        # Add device information if available
        if device_info and request.enabled:
            response["device_name"] = device_info["device_name"]
            response["device_id"] = device_info["device_id"]
        print(f"Returning response: {response}")
        return response

    except Exception as e:
        print(f"Error in toggle_gpodder_sync: {e}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")


def refresh_gpodder_subscription_for_background(database_type, user_id, gpodder_url, gpodder_token,
                                                gpodder_login, sync_type, device_id=None, device_name=None, is_remote=False):
    """Wrapper function for background tasks to ensure proper database connection handling"""
    from database_functions.db_client import create_database_connection, close_database_connection
    import logging

    logger = logging.getLogger(__name__)

    # Create a new connection explicitly for this background task
    cnx = create_database_connection()

    try:
        print(f"Starting background refresh for user {user_id} with sync_type {sync_type}")
        # Call the original function with our managed connection
        success = database_functions.functions.refresh_gpodder_subscription(
            database_type,
            cnx,
            user_id,
            gpodder_url,
            gpodder_token,
            gpodder_login,
            sync_type,
            device_id,
            device_name,
            is_remote
        )
        return success
    except Exception as e:
        logger.error(f"Error in background gpodder refresh: {str(e)}")
        return False
    finally:
        # Always close the connection we created
        close_database_connection(cnx)
        print(f"Closed database connection for background task for user {user_id}")
# Helper function to generate a token for internal gpodder API
def generate_gpodder_token(user_id):
    # Token format: "internal_gpodder_<user_id>_<32 hex chars>".
    import secrets
    token = secrets.token_hex(16)
    return f"internal_gpodder_{user_id}_{token}"

@app.get("/api/data/gpodder/status")
async def get_gpodder_status(
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Get the current gpodder sync status for the user"""
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    # Get the user ID from the API key
    # NOTE(review): unlike toggle_gpodder_sync above, this does not unwrap
    # dict/tuple results from id_from_api_key — confirm the return shape.
    user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if not user_id:
        raise HTTPException(status_code=403, detail="Invalid API key")

    try:
        user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)

        if not user_data:
            raise HTTPException(status_code=404, detail="User not found")

        sync_type = user_data["sync_type"]

        return {
            "sync_type": sync_type,
            "gpodder_enabled": sync_type in ["gpodder", "both"],
            "external_enabled": sync_type in ["external", "both"],
            "external_url": user_data["gpodder_url"],
            "api_url": "http://localhost:8042"  # Replace with actual API URL if needed
        }
    except Exception as e:
        print(f"Error in get_gpodder_status: {e}")
        raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")

class BackupUser(BaseModel):
    # ID of the user whose subscriptions are exported.
    user_id: int


@app.post("/api/data/backup_user", response_class=PlainTextResponse)
async def backup_user(data: BackupUser, cnx=Depends(get_database_connection),
                      api_key: str = Depends(get_api_key_from_header)):
    """Export a user's podcast subscriptions as plain text (OPML). Self-only
    unless using the web key.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == data.user_id or is_web_key:
        try:
            opml_data = database_functions.functions.backup_user(database_type, cnx, data.user_id)
        except Exception as e:
            raise HTTPException(status_code=400, detail=str(e))
        return opml_data
    else:
        raise HTTPException(status_code=403,
                            detail="You can only make backups for yourself!")


class BackupServerRequest(BaseModel):
    # Database password used to authorize the full-server dump.
    database_pass: str

@app.post("/api/data/backup_server", response_class=PlainTextResponse)
async def backup_server(request: BackupServerRequest, is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
    """Admin-only: produce a full-server database dump as plain text."""
    # logging.info(f"request: {request}")
    if not is_admin:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Not authorized")
    try:
        dump_data = database_functions.functions.backup_server(database_type, cnx, request.database_pass)
    except Exception as e:
        raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
    return Response(content=dump_data, media_type="text/plain")

@app.post("/api/data/restore_server")
async def api_restore_server(
    background_tasks: BackgroundTasks,
    backup_file: UploadFile,
    database_pass: str = Form(...),
    is_admin: bool = Depends(check_if_admin),
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Admin-only: restore the server from an uploaded .sql dump (max 100MB).
    The restore itself runs as a background task; this returns immediately.
    """
    if not is_admin:
        raise HTTPException(status_code=403, detail="Not authorized")

    if not backup_file.filename.endswith('.sql'):
        raise HTTPException(status_code=400, detail="Invalid file type. Only .sql files are allowed")

    file_content = await backup_file.read()
    if len(file_content) > 100 * 1024 * 1024:  # 100MB limit
        raise HTTPException(status_code=413, detail="File too large")

    logging.info(f"Restoring server with uploaded backup file")
    background_tasks.add_task(restore_server_fun, database_pass, file_content)
    return JSONResponse(content={"detail": "Server restoration started."})

def restore_server_fun(database_pass: str, server_restore_data: str):
    """Background worker: apply a server restore with its own DB connection.

    NOTE(review): the annotation says str but the caller passes the uploaded
    file's bytes — confirm what restore_server expects.
    """
    # Assuming create_database_connection and restore_server are defined in database_functions.functions
    cnx = create_database_connection()  # Replace with your method to create a new DB connection
    try:
        # Restore server using the provided password and data
        database_functions.functions.restore_server(cnx, database_pass, server_restore_data)
    finally:
        cnx.close()

@app.get("/api/data/rss_feed_status")
async def get_rss_feed_status(
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Get RSS feed enabled status for current user"""
    try:
        key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        print(f'user_id for rss: {key_id}')
        if not key_id:
            raise HTTPException(status_code=403, detail="Invalid API key")

        # NOTE(review): local `status` shadows the imported fastapi `status`
        # module within this handler.
        status = database_functions.functions.get_rss_feed_status(cnx, database_type, key_id)
        print(status)
        return status

    except Exception as e:
        # NOTE(review): also converts the 403 HTTPException above into a 500.
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/api/data/toggle_rss_feeds")
async def toggle_rss_feeds_endpoint(
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Toggle RSS feed status for current user"""
    try:
        key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if not key_id:
            raise HTTPException(status_code=403, detail="Invalid API key")
        new_status = database_functions.functions.toggle_rss_feeds(cnx, database_type, key_id)
        return {"success": True, "enabled": new_status}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/api/feed/{user_id}")
async def get_user_feed(
    request: Request,
    user_id: int,
    api_key: str,  # Now a query parameter
    limit: int = 1000,
    podcast_id: Optional[int] = None,
    source_type: str = Query(None, alias='type'),
    cnx=Depends(get_database_connection)
):
    """Get RSS feed for all podcasts or a specific podcast"""
    # Accepts an RSS key; falls back to treating a plain API key as a
    # universal RSS key for backwards compatibility (see TODO below).
    print(f'user: {user_id}, api: {api_key}')
    print(f'podcast_id parameter: {podcast_id}, type: {type(podcast_id)}')
    print(f'podcast_id_list will be: {[podcast_id] if podcast_id is not None else None}')
    try:
        # Prefer an explicitly configured HOSTNAME; otherwise derive the
        # public URL from the incoming request.
        domain = os.getenv('HOSTNAME', f'{request.url.scheme}://{request.url.hostname}:{request.url.port or 80}')


        # Convert single podcast_id to list format if provided
        podcast_id_list = [podcast_id] if podcast_id is not None else None

        rss_key = database_functions.functions.get_rss_key_if_valid(cnx, database_type, api_key, podcast_id_list)

        # TODO: remove this once backwards compatibility is no longer needed
        if not rss_key:
            key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
            if not key_id:
                raise HTTPException(status_code=403, detail="Invalid API key")
            rss_key = {
                "podcast_ids": [ -1 ],
                "user_id": key_id,
                "key": api_key
            }

        feed_content = database_functions.functions.generate_podcast_rss(
            database_type,
            cnx,
            rss_key,
            limit,
            source_type,
            domain,
            podcast_id=podcast_id_list
        )
        return Response(
            content=feed_content,
            media_type="application/rss+xml"
        )
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.post("/api/data/rss_feed_status/{user_id}")
async def toggle_rss_feeds(
    user_id: int,
    enable: bool,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Enable or disable RSS feeds for a user"""
    # NOTE(review): this definition continues beyond the end of this chunk.
    try:
        key_id =
database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if not key_id: - raise HTTPException(status_code=403, detail="Invalid API key") - - new_status = database_functions.functions.set_rss_feed_status(cnx, database_type, user_id, enable) - return {"status": "success", "enabled": new_status} - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@app.get("/api/data/rss_key") -async def get_user_rss_key( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Get the RSS key for the current user""" - try: - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if not key_id: - raise HTTPException(status_code=403, detail="Invalid API key") - - rss_key = database_functions.functions.get_user_rss_key(cnx, database_type, key_id) - if not rss_key: - raise HTTPException(status_code=404, detail="No RSS key found. Please enable RSS feeds first.") - - return {"rss_key": rss_key} - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -class YouTubeChannel(BaseModel): - channel_id: str - name: str - description: str - subscriber_count: Optional[int] - url: str - video_count: Optional[int] - thumbnail_url: Optional[str] - recent_videos: List[dict] = [] - - class Config: - json_encoders = { - list: lambda v: v # Preserve lists during JSON encoding - } - -@app.get("/api/data/search_youtube_channels") -async def search_youtube_channels( - query: str, - max_results: int = 5, - user_id: int = None, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - # Validate API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if web key and verify user permission - is_web_key = api_key == 
base_webkey.web_key - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if not (key_id == user_id or is_web_key): - raise HTTPException( - status_code=403, - detail="You can only search with your own account." - ) - - try: - # First get channel ID using a search - search_url = f"ytsearch{max_results*4}:{query}" - - ydl_opts = { - 'quiet': True, - 'extract_flat': True, - 'no_warnings': True, - 'skip_download': True, - 'extract_info': True, - } - - with YoutubeDL(ydl_opts) as ydl: - logging.info(f"Searching YouTube with query: {query}") - results = ydl.extract_info(search_url, download=False) - - if not results or 'entries' not in results: - return {"results": []} - - processed_results = [] - seen_channels = set() # Track unique channels - channel_videos = {} - - for entry in results.get('entries', []): - try: - channel_id = entry.get('channel_id') or entry.get('uploader_id') - if not channel_id: - continue - - # First collect the video regardless of whether we've seen the channel - if channel_id not in channel_videos: - channel_videos[channel_id] = [] - if len(channel_videos[channel_id]) < 3: # Limit to 3 videos - channel_videos[channel_id].append({ - 'id': entry.get('id', ''), - 'title': entry.get('title', ''), - 'duration': entry.get('duration'), - 'url': f"https://www.youtube.com/watch?v={entry.get('id')}" - }) - print(f"Added video to channel {channel_id}, now has {len(channel_videos[channel_id])} videos") - - - # Now check if we've already processed this channel - if channel_id in seen_channels: - continue - - seen_channels.add(channel_id) - - # Get minimal channel info - channel_opts = ydl_opts.copy() - channel_opts['extract_flat'] = True - channel_opts['process'] = False - - channel_url = f"https://www.youtube.com/channel/{channel_id}" - channel_info = ydl.extract_info( - channel_url, - download=False, - process=False # Don't process more than necessary - ) - - # Get avatar URL from channel info - thumbnail_url = None - if 
channel_info and channel_info.get('thumbnails'): - # Try to find avatar-specific thumbnails first - avatar_thumbnails = [t for t in channel_info['thumbnails'] - if t.get('id', '').startswith('avatar')] - - if avatar_thumbnails: - # Get the largest avatar thumbnail - thumbnail_url = avatar_thumbnails[-1]['url'] - else: - # Fallback: try to find any thumbnail with "avatar" in the URL - avatar_thumbnails = [t for t in channel_info['thumbnails'] - if 'avatar' in t.get('url', '').lower()] - if avatar_thumbnails: - thumbnail_url = avatar_thumbnails[-1]['url'] - else: - # Last resort: use the first thumbnail - thumbnail_url = channel_info['thumbnails'][0]['url'] - print(f"Creating channel {channel_id} with {len(channel_videos[channel_id])} videos") - channel = YouTubeChannel( - channel_id=channel_id, - name=entry.get('channel', '') or entry.get('uploader', ''), - description=entry.get('description', '')[:500] if entry.get('description') else '', - subscriber_count=None, - url=f"https://www.youtube.com/channel/{channel_id}", - video_count=None, - thumbnail_url=thumbnail_url or entry.get('channel_thumbnail', ''), - recent_videos=channel_videos[channel_id] # <-- Use our collected videos here - ) - - if len(processed_results) < max_results: - channel_dict = channel.dict() - channel_dict['recent_videos'] = channel_videos[channel_id] # Explicitly set after dict conversion - processed_results.append(channel_dict) - else: - break - - except Exception as entry_error: - logging.error(f"Error processing channel entry: {entry_error}") - continue - - logging.info(f"Found {len(processed_results)} channels") - return {"results": processed_results} - - except Exception as e: - logging.error(f"YouTube channel search error: {str(e)}") - raise HTTPException( - status_code=500, - detail=f"Error searching YouTube channels: {str(e)}" - ) - -def process_youtube_channel(podcast_id: int, channel_id: str, feed_cutoff: int): - cnx = create_database_connection() - try: - 
database_functions.youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff) - finally: - close_database_connection(cnx) - -@app.post("/api/data/youtube/subscribe") -async def subscribe_to_youtube_channel( - channel_id: str, - user_id: int, - background_tasks: BackgroundTasks, - feed_cutoff: int = 30, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Subscribe to a YouTube channel""" - import logging - logger = logging.getLogger(__name__) - - try: - logger.info(f"Starting subscription for channel {channel_id}") - - existing_id = database_functions.functions.check_existing_channel_subscription(cnx, database_type, channel_id, user_id) - if existing_id: - logger.info(f"Channel {channel_id} already subscribed") - return { - "success": True, - "podcast_id": existing_id, - "message": "Already subscribed to this channel" - } - - logger.info("Getting channel info") - channel_info = await database_functions.youtube.get_channel_info(channel_id) - - logger.info("Adding channel to database") - podcast_id = database_functions.functions.add_youtube_channel(cnx, database_type, channel_info, user_id, feed_cutoff) - - logger.info(f"Starting background task for podcast_id {podcast_id}") - background_tasks.add_task(process_youtube_channel, podcast_id, channel_id, feed_cutoff) - - logger.info("Subscription completed successfully") - return { - "success": True, - "podcast_id": podcast_id, - "message": "Channel subscription initiated. Videos will be processed in background." 
- } - except Exception as e: - logger.error(f"Error subscribing to channel: {str(e)}", exc_info=True) - raise HTTPException( - status_code=500, - detail=f"Error subscribing to channel: {str(e)}" - ) - -@app.post("/api/auth/store_state") -async def store_oidc_state( - request: Request, -): - try: - data = await request.json() - state = data.get('state') - client_id = data.get('client_id') - - if not state or not client_id: - raise HTTPException(status_code=400, detail="Missing state or client_id") - - success = database_functions.oidc_state_manager.oidc_state_manager.store_state(state, client_id) - if not success: - raise HTTPException(status_code=500, detail="Failed to store state") - - return {"status": "success"} - except Exception as e: - logging.error(f"Error storing OIDC state: {str(e)}") - raise HTTPException(status_code=500, detail="Failed to store state") - -@app.get("/api/auth/callback") -async def oidc_callback( - request: Request, - code: str, - state: str = None, - cnx=Depends(get_database_connection) -): - try: - base_url = str(request.base_url)[:-1] - # Force HTTPS if running in production - if not base_url.startswith('http://localhost'): - if base_url.startswith('http:'): - base_url = 'https:' + base_url[5:] - - print(f"Base URL: {base_url}") - frontend_base = base_url.replace('/api', '') - - # Get client_id from query parameters - client_id = database_functions.oidc_state_manager.oidc_state_manager.get_client_id(state) - if not client_id: - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=invalid_state" - ) - - registered_redirect_uri = f"{base_url}/api/auth/callback" - print(f"Using redirect_uri: {registered_redirect_uri}") - - # Get OIDC provider details - provider = database_functions.functions.get_oidc_provider(cnx, database_type, client_id) - if not provider: - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=invalid_provider" - ) - - # Unpack provider details - provider_id, client_id, client_secret, 
token_url, userinfo_url, name_claim, email_claim, username_claim, roles_claim, user_role, admin_role = provider - - # Exchange authorization code for access token - async with httpx.AsyncClient() as client: - try: - token_response = await client.post( - token_url, - data={ - "grant_type": "authorization_code", - "code": code, - "redirect_uri": registered_redirect_uri, - "client_id": client_id, - "client_secret": client_secret, - }, - headers={ - "Accept": "application/json" - } - ) - - if token_response.status_code != 200: - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=token_exchange_failed" - ) - - token_data = token_response.json() - print(f"Token response: {token_data}") - access_token = token_data.get("access_token") - - # Get user info from OIDC provider - headers = { - "Authorization": f"Bearer {access_token}", - "User-Agent": "PinePods/1.0", # Add a meaningful user agent - "Accept": "application/json" - } - userinfo_response = await client.get(userinfo_url, headers=headers) - - if userinfo_response.status_code != 200: - error_content = userinfo_response.text - print(f"GitHub API error: {error_content}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=userinfo_failed" - ) - - user_info = userinfo_response.json() - print(f"User info response: {user_info}") - email = user_info.get(email_claim or "email") - - parsed_url = urlparse(userinfo_url) - if not email and parsed_url.hostname == 'api.github.com': - # For GitHub, we may need to make a separate request for emails - # because GitHub doesn't include email in user info if it's private - emails_response = await client.get( - 'https://api.github.com/user/emails', - headers=headers - ) - - if emails_response.status_code == 200: - emails = emails_response.json() - # Find the primary email - for email_obj in emails: - if email_obj.get('primary') and email_obj.get('verified'): - email = email_obj.get('email') - break - - # If no primary found, take the first verified 
one - if not email: - for email_obj in emails: - if email_obj.get('verified'): - email = email_obj.get('email') - break - - if not email: - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=email_required" - ) - - except httpx.RequestError: - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=network_error" - ) - - # Verify access. - if roles_claim and user_role: - roles = user_info.get(roles_claim) - if not isinstance(roles, list): - print(f'Claim {roles_claim} should be a list of strings, but it is {roles}.') - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=no_access&details=invalid_roles" - ) - if user_role not in roles and not (admin_role and admin_role in roles): - print(f"User user role {user_role} {f'and admin role {admin_role}' if admin_role else ''} not in user's roles ({roles}), denying access.") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=no_access" - ) - - # Check if user exists - user = database_functions.functions.get_user_by_email(cnx, database_type, email) - - # In your OIDC callback function, replace the user creation section with: - - # Determine the user's information - fullname = user_info.get(name_claim or "name", "") - if username_claim and username_claim not in user_info: - print(f"Unable to determine username for user, username claim {username_claim} not present") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details=username_claim_missing" - ) - username = user_info.get(username_claim or "preferred_username") - - if not user: - # Create new user - print(f"User with email {email} not found, creating new user") - - if username is None: - username = email.split("@")[0].lower() - base_username = username - counter = 1 - max_attempts = 10 - - while counter <= max_attempts: - try: - print(f"Attempt {counter} to create user with base username: {base_username}") - user_id = 
database_functions.functions.create_oidc_user( - cnx, database_type, email, fullname, username - ) - print(f"User created successfully with ID: {user_id}") - - if not user_id: - print(f"ERROR: Invalid user_id returned: {user_id}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=invalid_user_id" - ) - - print(f"Creating API key for user_id: {user_id}") - api_key = database_functions.functions.create_api_key(cnx, database_type, user_id) - print(f"API key created: {api_key[:5]}... (truncated for security)") - break - except UniqueViolation: - print(f"Username conflict with {username}, trying next variation") - username = f"{base_username}{counter}" - counter += 1 - if counter > max_attempts: - print(f"Failed to create user after {max_attempts} attempts due to username conflicts") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=username_conflict" - ) - except Exception as e: - print(f"Error during user creation: {str(e)}") - import traceback - print(f"Traceback: {traceback.format_exc()}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details={str(e)[:50]}" - ) - else: - print("Failed to create user after maximum attempts") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=user_creation_failed" - ) - - else: - try: - print(f"Attempt to create user with username: {username}") - user_id = database_functions.functions.create_oidc_user( - cnx, database_type, email, fullname, username - ) - print(f"User created successfully with ID: {user_id}") - - if not user_id: - print(f"ERROR: Invalid user_id returned: {user_id}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=invalid_user_id" - ) - - print(f"Creating API key for user_id: {user_id}") - api_key = database_functions.functions.create_api_key(cnx, database_type, user_id) - print(f"API key created: {api_key[:5]}... 
(truncated for security)") - except UniqueViolation: - print("Failed to create user due to username conflicts") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=username_conflict" - ) - except Exception as e: - print(f"Error during user creation: {str(e)}") - import traceback - print(f"Traceback: {traceback.format_exc()}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details={str(e)[:50]}" - ) - - else: - # Existing user - retrieve their API key - print(f"User with email {email} found, retrieving API key") - user_id = user[0] if isinstance(user, tuple) else user['userid'] # Adjust based on your DB return format - - api_key = database_functions.functions.get_user_api_key(cnx, database_type, user_id) - if not api_key: - print(f"No API key found for user_id: {user_id}, creating a new one") - api_key = database_functions.functions.create_api_key(cnx, database_type, user_id) - - print(f"API key retrieved: {api_key[:5]}... (truncated for security)") - - # Update user info based on OIDC information. - database_functions.functions.set_fullname(cnx, database_type, user_id, fullname) - - current_username = user[2] if isinstance(user, tuple) else user['username'] - if username_claim and username != current_username: - if database_functions.functions.check_usernames(cnx, database_type, username): - print(f'Unable to update username for user {user_id} to match the username specified by the OIDC provider ({username}) as this is already in use by another user.') - else: - database_functions.functions.set_username(cnx, database_type, user_id, username) - - # Update admin role based on OIDC roles. 
- if roles_claim and admin_role: - roles = user_info.get(roles_claim) - if not isinstance(roles, list): - print(f'Claim {roles_claim} should be a list of strings, but it is {roles}.') - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=no_access&details=invalid_roles" - ) - database_functions.functions.set_isadmin(cnx, database_type, user_id, admin_role in roles) - - # Success case - redirect with API key - return RedirectResponse(url=f"{frontend_base}/oauth/callback?api_key={api_key}") - - except Exception as e: - logging.error(f"OIDC callback error: {str(e)}") - return RedirectResponse( - url=f"{frontend_base}/oauth/callback?error=authentication_failed" - ) - -# Store active connections -class ConnectionManager: - def __init__(self): - # Map of user_id to list of websocket connections - self.active_connections: Dict[int, List[WebSocket]] = {} - - async def connect(self, websocket: WebSocket, user_id: int): - await websocket.accept() - if user_id not in self.active_connections: - self.active_connections[user_id] = [] - self.active_connections[user_id].append(websocket) - - def disconnect(self, websocket: WebSocket, user_id: int): - if user_id in self.active_connections: - if websocket in self.active_connections[user_id]: - self.active_connections[user_id].remove(websocket) - if not self.active_connections[user_id]: - del self.active_connections[user_id] - - async def broadcast_to_user(self, user_id: int, message: Dict[str, Any]): - if user_id in self.active_connections: - # Convert to JSON once for efficiency - json_message = json.dumps(message) - disconnected = [] - - # Send to all connections for this user - for websocket in self.active_connections[user_id]: - try: - await websocket.send_text(json_message) - except Exception: - disconnected.append(websocket) - - # Clean up any failed connections - for websocket in disconnected: - self.disconnect(websocket, user_id) - -# Initialize connection manager -manager = ConnectionManager() - -# Define 
the broadcast message model -class BroadcastMessage(BaseModel): - user_id: int - message: Dict[str, Any] - -@app.post("/api/tasks/broadcast") -async def broadcast_task_update( - data: BroadcastMessage, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Endpoint to broadcast a task update to a user via WebSocket""" - - # Verify API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if manager has the user in active connections - user_id = data.user_id - has_connections = user_id in manager.active_connections - print(f"Broadcasting to user {user_id}, has connections: {has_connections}") - - if has_connections: - # Broadcast the message - await manager.broadcast_to_user(user_id, data.message) - return {"success": True, "message": f"Broadcast sent to user {user_id}"} - else: - print(f"No active connections for user {user_id}") - return {"success": False, "message": f"No active connections for user {user_id}"} - -# Model for task query parameters -class TaskQueryParams(BaseModel): - user_id: int - -# Extract API key from WebSocket query parameters -async def get_api_key_from_websocket(websocket: WebSocket) -> str: - query_params = websocket.query_params - api_key = query_params.get("api_key") - - if not api_key: - raise ValueError("API key is required") - - return api_key - -@app.get("/api/tasks/active") -async def get_active_tasks( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - # Verify API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if user has 
permission to access these tasks - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - is_web_key = api_key == base_webkey.web_key - - if key_id != user_id and not is_web_key: - raise HTTPException( - status_code=403, - detail="You can only view your own tasks" - ) - - # Get all active tasks for the user - this needs to be expanded - # to include all types of tasks, not just downloads - active_tasks = database_functions.tasks.get_all_active_tasks(user_id) - - return {"tasks": active_tasks} - -# Add this DEBUG logging to your FastAPI WebSocket endpoint in clientapi.py -@app.websocket("/ws/api/tasks/{user_id}") -async def websocket_endpoint( - websocket: WebSocket, - user_id: int, - cnx=Depends(get_database_connection) -): - print(f"WebSocket connection request received for user {user_id}") - # Get API key from websocket query params - try: - api_key = await get_api_key_from_websocket(websocket) - print(f"WebSocket API key validated for user {user_id}") - - # Verify API key - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - print(f"Invalid API key for WebSocket connection, user {user_id}") - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return - - # Check if user has permission - key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - is_web_key = api_key == base_webkey.web_key - - if key_id != user_id and not is_web_key: - print(f"Permission denied for WebSocket connection, user {user_id}") - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return - - # Accept the connection - await manager.connect(websocket, user_id) - print(f"WebSocket connection accepted for user {user_id}") - - # Send initial task list with all types of tasks - active_tasks = database_functions.tasks.get_all_active_tasks(user_id) - print(f"Found {len(active_tasks)} active tasks for user {user_id}") - await websocket.send_text(json.dumps({ - 
"event": "initial", - "tasks": active_tasks - })) - - # Keep connection alive and handle messages - try: - while True: - # Handle any incoming messages (client might request refresh) - message = await websocket.receive_text() - data = json.loads(message) - print(f"Received WebSocket message from user {user_id}: {data}") - - if data.get("action") == "refresh": - # Send updated task list with all tasks - active_tasks = database_functions.tasks.get_all_active_tasks(user_id) - await websocket.send_text(json.dumps({ - "event": "refresh", - "tasks": active_tasks - })) - - # Wait a short while before next iteration - await asyncio.sleep(0.1) - - except WebSocketDisconnect: - print(f"WebSocket disconnected for user {user_id}") - manager.disconnect(websocket, user_id) - - except Exception as e: - print(f"WebSocket error for user {user_id}: {str(e)}") - try: - await websocket.close(code=status.WS_1011_INTERNAL_ERROR) - except: - pass - - -class InitRequest(BaseModel): - api_key: str - -@app.post("/api/init/startup_tasks") -async def run_startup_tasks(request: InitRequest, cnx=Depends(get_database_connection)): - try: - print('start of startup') - # Verify if the API key is valid - is_valid = database_functions.functions.verify_api_key(cnx, database_type, request.api_key) - web_key = database_functions.functions.get_web_key(cnx, database_type) - # Check if the provided API key is the web key - is_web_key = request.api_key == web_key - - if not is_valid or not is_web_key: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid or unauthorized API key") - - # Execute the startup tasks - database_functions.functions.add_news_feed_if_not_added(database_type, cnx) - return {"status": "Startup tasks completed successfully."} - - database_functions.valkey_client.connect() - except Exception as e: - logger.error(f"Error in startup tasks: {e}") - raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to complete startup tasks") - 
finally: - # The connection will automatically be closed by FastAPI's dependency system - pass - -async def async_tasks(): - # Start cleanup task - logging.info("Starting cleanup tasks") - asyncio.create_task(cleanup_temp_mfa_secrets()) - - -if __name__ == '__main__': - raw_debug_mode = os.environ.get("DEBUG_MODE", "False") - DEBUG_MODE = raw_debug_mode.lower() == "true" - if DEBUG_MODE: - logging.info("Debug Mode Enabled") - else: - logging.info("Debug Mode Disabled") - config_file = "/pinepods/startup/logging_config_debug.ini" if DEBUG_MODE else "/pinepods/startup/logging_config.ini" - logging.info(config_file) - parser = argparse.ArgumentParser() - parser.add_argument('--port', type=int, default=8032, help='Port to run the server on') - args = parser.parse_args() - asyncio.run(async_tasks()) - - import uvicorn - - uvicorn.run( - "clientapi:app", - host="0.0.0.0", - port=args.port, - log_config=config_file, - limit_concurrency=1000, - ) diff --git a/completed_todos.md b/completed_todos.md index 190a2690..9346c606 100644 --- a/completed_todos.md +++ b/completed_todos.md @@ -13,6 +13,32 @@ Major Version: - [ ] Fix episode spacing on queue page. The context button still shows even on smallest screens - [ ] Check youtube download Issues when changing the download time +0.8.2 + +- [x] Translations on the web app +- [x] Account Settings now updates dropdowns with pre-populated values +- [x] episode-layout (podcast page) will now set sort settings based on pod id +- [x] Added endpoint to delete OIDC settings +- [x] Added endpoint to Edit OIDC settings +- [x] Manually search or enter podcast index id for matching to podcast index +- [x] OIDC Setup on start +- [x] Better errors if needed vars are missing +- [x] Redis/Valkey Authentication +- [x] Move Episode Addition process to the background when adding a podcast +- [x] Support HTTP request notifications. 
Will work with Telegram and quite a few other basic http notification platforms +- [x] Podcast Merge Options +- [x] Individual Episode download on /episode page +- [x] Option to use Podcast covers if desired +- [x] Fix issue where release date on podcasts not added shows as current date/time +- [x] Fix yt-dlp issues + +- [x] Gpodder Completion Set Bug where if episode played length was exactly the length of the podcast episode it wouldn't mark complete +- [x] Fixed issue with auto complete threshold. Will now mark historical episodes complete when enabled +- [x] Some sort of loading indicator for the single ep download +- [x] Fix issue where duplicate episodes were created if details of the episode were updated +- [x] Fully dynamic Playlist implementation +- [x] Checking on rss feeds returning downloaded urls correctly + 0.7.9 - [x] Finish implementing long finger press - fix on iOS (close, it doesn't auto close when clicking away currently) diff --git a/database_functions/app_functions.py b/database_functions/app_functions.py deleted file mode 100644 index 82c56678..00000000 --- a/database_functions/app_functions.py +++ /dev/null @@ -1,289 +0,0 @@ -from typing import Optional - -def send_email(server_name, server_port, from_email, to_email, send_mode, encryption, auth_required, username, password, subject, body): - import smtplib - from email.mime.multipart import MIMEMultipart - from email.mime.text import MIMEText - import ssl - import socket - - try: - if send_mode == "SMTP": - # Set up the SMTP server. - if encryption == "SSL/TLS": - smtp = smtplib.SMTP_SSL(server_name, server_port, timeout=10) - elif encryption == "STARTTLS": - smtp = smtplib.SMTP(server_name, server_port, timeout=10) - smtp.starttls() - else: # No encryption - smtp = smtplib.SMTP(server_name, server_port, timeout=10) - - - # Authenticate if needed. 
- if auth_required: - try: # Trying to login and catching specific SMTPNotSupportedError - smtp.login(username, password) - except smtplib.SMTPNotSupportedError: - return 'SMTP AUTH extension not supported by server.' - - # Create a message. - msg = MIMEMultipart() - msg['From'] = from_email - msg['To'] = to_email - msg['Subject'] = subject - msg.attach(MIMEText(body, 'plain')) - - # Send the message. - smtp.send_message(msg) - smtp.quit() - return 'Email sent successfully.' - - elif send_mode == "Sendmail": - pass - except ssl.SSLError: - return 'SSL Wrong Version Number. Try another ssl type?' - except smtplib.SMTPAuthenticationError: - return 'Authentication Error: Invalid username or password.' - except smtplib.SMTPRecipientsRefused: - return 'Recipients Refused: Email address is not accepted by the server.' - except smtplib.SMTPSenderRefused: - return 'Sender Refused: Sender address is not accepted by the server.' - except smtplib.SMTPDataError: - return 'Unexpected server response: Possibly the message data was rejected by the server.' - except socket.gaierror: - return 'Server Not Found: Please check your server settings.' - except ConnectionRefusedError: - return 'Connection Refused: The server refused the connection.' - except TimeoutError: - return 'Timeout Error: The connection to the server timed out.' 
- except smtplib.SMTPException as e: - return f'Failed to send email: {str(e)}' - - - -def sync_with_nextcloud(nextcloud_url, nextcloud_token): - print("Starting Nextcloud Sync") - - headers = { - "Authorization": f"Bearer {nextcloud_token}", - "Content-Type": "application/json" - } - - # Sync Subscriptions - sync_subscriptions(nextcloud_url, headers) - - # Sync Episode Actions - sync_episode_actions(nextcloud_url, headers) - - -def sync_subscriptions(nextcloud_url, headers, user_id): - import requests - # Implement fetching and updating subscriptions - # Example GET request to fetch subscriptions - response = requests.get(f"{nextcloud_url}/index.php/apps/gpoddersync/subscriptions", headers=headers) - # Handle the response - print(response.json()) - - -def sync_subscription_change(nextcloud_url, headers, add, remove): - import requests - payload = { - "add": add, - "remove": remove - } - response = requests.post(f"{nextcloud_url}/index.php/apps/gpoddersync/subscription_change/create", json=payload, - headers=headers) - -def sync_subscription_change_gpodder(gpodder_url, gpodder_login, auth, add, remove): - import requests - payload = { - "add": add, - "remove": remove - } - response = requests.post(f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/default.json", json=payload, auth=auth) - response.raise_for_status() - print(f"Subscription changes synced with gPodder: {response.text}") - - -def sync_subscription_change_gpodder_session(session, gpodder_url, gpodder_login, add, remove): - """Sync subscription changes using session-based authentication""" - import logging - - logger = logging.getLogger(__name__) - - payload = { - "add": add, - "remove": remove - } - - try: - response = session.post( - f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/default.json", - json=payload - ) - response.raise_for_status() - logger.info(f"Subscription changes synced with gPodder using session: {response.text}") - return True - except Exception as e: - logger.error(f"Error 
syncing subscription changes with session: {str(e)}") - return False - -def sync_episode_actions(nextcloud_url, headers): - print('test') - # Implement fetching and creating episode actions - # Similar to the sync_subscriptions method - -def get_podcast_values(feed_url, user_id, username: Optional[str] = None, password: Optional[str] = None, display_only: bool = False): - import feedparser - import json - import requests - from requests.auth import HTTPBasicAuth - - # Use requests to fetch the feed content - try: - # Simpler headers that worked in the original version - headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3', - 'Accept-Language': 'en-US,en;q=0.9', - } - print(f"Fetching URL: {feed_url}") - - if username and password: - print(f"Using auth for user: {username}") - response = requests.get(feed_url, headers=headers, auth=HTTPBasicAuth(username, password)) - else: - response = requests.get(feed_url, headers=headers) - - response.raise_for_status() - # Use binary content which worked in the original version - feed_content = response.content - - except requests.RequestException as e: - try: - if 'response' in locals(): - print(f"Response headers: {response.headers}") - print(f"Response content: {response.content[:500]}") - except: - pass - raise ValueError(f"Error fetching the feed: {str(e)}") - - # Parse the feed - d = feedparser.parse(feed_content) - print(f"Feed parsed - title: {d.feed.get('title', 'Unknown')}") - - # Initialize podcast_values as in the original version that worked - podcast_values = { - 'pod_title': d.feed.title if hasattr(d.feed, 'title') else None, - 'pod_artwork': None, # We'll set this with multiple checks below - 'pod_author': d.feed.author if hasattr(d.feed, 'author') else None, - 'categories': [], - 'pod_description': d.feed.description if hasattr(d.feed, 'description') else None, - 'pod_episode_count': len(d.entries) if display_only else 
0, - 'pod_feed_url': feed_url, - 'pod_website': d.feed.link if hasattr(d.feed, 'link') else None, - 'pod_explicit': False, - 'user_id': user_id - } - - # Enhanced image URL extraction combining both approaches - if hasattr(d.feed, 'image'): - if hasattr(d.feed.image, 'href'): - podcast_values['pod_artwork'] = d.feed.image.href - elif hasattr(d.feed.image, 'url'): # Added for news feed format - podcast_values['pod_artwork'] = d.feed.image.url - elif isinstance(d.feed.image, dict): - if 'href' in d.feed.image: - podcast_values['pod_artwork'] = d.feed.image['href'] - elif 'url' in d.feed.image: - podcast_values['pod_artwork'] = d.feed.image['url'] - - # iTunes image fallback - if not podcast_values['pod_artwork'] and hasattr(d.feed, 'itunes_image'): - if hasattr(d.feed.itunes_image, 'href'): - podcast_values['pod_artwork'] = d.feed.itunes_image.href - elif isinstance(d.feed.itunes_image, dict) and 'href' in d.feed.itunes_image: - podcast_values['pod_artwork'] = d.feed.itunes_image['href'] - - # Author fallback - if not podcast_values['pod_author'] and hasattr(d.feed, 'itunes_author'): - podcast_values['pod_author'] = d.feed.itunes_author - - # Description fallbacks - if not podcast_values['pod_description']: - if hasattr(d.feed, 'subtitle'): - podcast_values['pod_description'] = d.feed.subtitle - elif hasattr(d.feed, 'itunes_summary'): - podcast_values['pod_description'] = d.feed.itunes_summary - - # Category extraction with robust error handling - try: - if hasattr(d.feed, 'itunes_category'): - if isinstance(d.feed.itunes_category, list): - for cat in d.feed.itunes_category: - if isinstance(cat, dict) and 'text' in cat: - podcast_values['categories'].append(cat['text']) - elif hasattr(cat, 'text'): - podcast_values['categories'].append(cat.text) - elif isinstance(d.feed.itunes_category, dict) and 'text' in d.feed.itunes_category: - podcast_values['categories'].append(d.feed.itunes_category['text']) - except Exception as e: - print(f"Error extracting categories: {e}") 
- - # Handle empty categories - if not podcast_values['categories']: - podcast_values['categories'] = {'1': 'Podcasts'} # Default category - else: - categories_dict = {str(i): cat for i, cat in enumerate(podcast_values['categories'], start=1)} - podcast_values['categories'] = categories_dict - - # Add explicit check with robust handling - try: - if hasattr(d.feed, 'itunes_explicit'): - if isinstance(d.feed.itunes_explicit, str): - podcast_values['pod_explicit'] = d.feed.itunes_explicit.lower() in ('yes', 'true', '1') - elif isinstance(d.feed.itunes_explicit, bool): - podcast_values['pod_explicit'] = d.feed.itunes_explicit - except Exception as e: - print(f"Error checking explicit flag: {e}") - - # Print values for debugging - print("Extracted podcast values:") - for key, value in podcast_values.items(): - print(f"{key}: {value}") - - return podcast_values - - - -def check_valid_feed(feed_url: str, username: Optional[str] = None, password: Optional[str] = None): - """ - Check if the provided URL points to a valid podcast feed. - Raises ValueError if the feed is invalid. 
- """ - import feedparser - import requests - # Use requests to fetch the feed content - try: - if username and password: - response = requests.get(feed_url, auth=(username, password)) - else: - response = requests.get(feed_url) - - response.raise_for_status() # Raise an exception for HTTP errors - feed_content = response.content - except requests.RequestException as e: - raise ValueError(f"Error fetching the feed: {str(e)}") - - # Parse the feed - parsed_feed = feedparser.parse(feed_content) - - # Check for basic RSS or Atom feed structure - if not parsed_feed.get('version'): - raise ValueError("Invalid podcast feed URL or content.") - - # Check for essential elements in the feed - if not ('title' in parsed_feed.feed and 'link' in parsed_feed.feed and 'description' in parsed_feed.feed): - raise ValueError("Feed missing required attributes: title, link, or description.") - - # If it passes the above checks, it's likely a valid feed - return parsed_feed diff --git a/database_functions/auth_functions.py b/database_functions/auth_functions.py deleted file mode 100644 index 43367061..00000000 --- a/database_functions/auth_functions.py +++ /dev/null @@ -1,41 +0,0 @@ -from passlib.context import CryptContext - -# Create a Passlib context for Argon2 -pwd_context = CryptContext(schemes=["argon2"], deprecated="auto") - -def hash_password(password: str): - # Use the Passlib context to hash the password - hashed_password = pwd_context.hash(password) - return hashed_password - -def verify_password(cnx, database_type, username: str, password: str) -> bool: - print("preparing pw check") - if database_type == "postgresql": - cursor = cnx.cursor() - cursor.execute('SELECT Hashed_PW FROM "Users" WHERE Username = %s', (username,)) - else: # MySQL or MariaDB - cursor = cnx.cursor(buffered=True) - cursor.execute("SELECT Hashed_PW FROM Users WHERE Username = %s", (username,)) - - result = cursor.fetchone() - cursor.close() - print("ran pw get") - - if not result: - print("User not 
found") - return False # User not found - - stored_hashed_password = result[0] if isinstance(result, tuple) else result["hashed_pw"] if result and "hashed_pw" in result else 0 - # Check the type of the result and access the is_admin value accordingly - # is_admin = is_admin_result[0] if isinstance(is_admin_result, tuple) else is_admin_result["IsAdmin"] if is_admin_result else 0 - - print(f"Stored hashed password: {stored_hashed_password}") - - try: - # Use the Passlib context to verify the password against the stored hash - is_valid = pwd_context.verify(password, stored_hashed_password) - print(f"Password verification result: {is_valid}") - return is_valid - except Exception as e: - print(f"Error verifying password: {e}") - return False \ No newline at end of file diff --git a/database_functions/db_client.py b/database_functions/db_client.py deleted file mode 100644 index de26eb47..00000000 --- a/database_functions/db_client.py +++ /dev/null @@ -1,141 +0,0 @@ -import os -import logging -import traceback -from fastapi import HTTPException -import psycopg -from psycopg_pool import ConnectionPool -from psycopg.rows import dict_row -from mysql.connector import pooling - -# Set up logging -logger = logging.getLogger(__name__) - -# Get database type from environment variable -database_type = str(os.getenv('DB_TYPE', 'mariadb')) - -# Create a singleton for the connection pool -class DatabaseConnectionPool: - _instance = None - _pool = None - - @classmethod - def get_instance(cls): - if cls._instance is None: - cls._instance = DatabaseConnectionPool() - return cls._instance - - def __init__(self): - if self._pool is None: - self._pool = self._create_pool() - - def _create_pool(self): - """Create a new connection pool based on the database type""" - db_host = os.environ.get("DB_HOST", "127.0.0.1") - db_port = os.environ.get("DB_PORT", "3306") - db_user = os.environ.get("DB_USER", "root") - db_password = os.environ.get("DB_PASSWORD", "password") - db_name = 
os.environ.get("DB_NAME", "pypods_database") - - print(f"Creating new database connection pool for {database_type}") - - if database_type == "postgresql": - conninfo = f"host={db_host} port={db_port} user={db_user} password={db_password} dbname={db_name}" - return ConnectionPool(conninfo=conninfo, min_size=1, max_size=32, open=True) - else: - # Add the autocommit and consume_results options to MySQL - return pooling.MySQLConnectionPool( - pool_name="pinepods_api_pool", - pool_size=32, - pool_reset_session=True, - autocommit=True, # Add this to prevent transaction issues - consume_results=True, # Add this to automatically consume unread results - collation="utf8mb4_general_ci", - host=db_host, - port=db_port, - user=db_user, - password=db_password, - database=db_name, - ) - - def get_connection(self): - """Get a connection from the pool""" - if database_type == "postgresql": - return self._pool.getconn() - else: - return self._pool.get_connection() - - def return_connection(self, cnx): - """Return a connection to the pool""" - if database_type == "postgresql": - self._pool.putconn(cnx) # PostgreSQL path unchanged - else: - # MySQL-specific cleanup - try: - # Clear any unread results before returning to pool - if hasattr(cnx, 'unread_result') and cnx.unread_result: - cursor = cnx.cursor() - cursor.fetchall() - cursor.close() - except Exception as e: - logger.warning(f"Failed to clean up MySQL connection: {str(e)}") - finally: - cnx.close() - -# Initialize the singleton pool -pool = DatabaseConnectionPool.get_instance() - -def create_database_connection(): - """Create and return a new database connection""" - try: - return pool.get_connection() - except Exception as e: - print(f"Database connection error: {str(e)}") - logger.error(f"Database connection error of type {type(e).__name__} with arguments: {e.args}") - logger.error(traceback.format_exc()) - raise RuntimeError("Unable to connect to the database") - -def close_database_connection(cnx): - """Close a database 
connection and handle both PostgreSQL and MySQL connections properly""" - if cnx is None: - return - - try: - # First determine the connection type - is_psql = hasattr(cnx, 'closed') # PostgreSQL has a 'closed' attribute - - if is_psql: - # PostgreSQL connection - try to return to pool first - try: - if not cnx.closed and pool is not None: - pool.return_connection(cnx) - return - except Exception as pool_err: - print(f"Could not return connection to pool: {str(pool_err)}") - # Fall back to direct close if return fails - if not cnx.closed: - cnx.close() - else: - # MySQL connection - just close directly, don't try to use the pool - if hasattr(cnx, 'close'): - cnx.close() - except Exception as e: - print(f"Error closing connection: {str(e)}") - logger.error(f"Error closing connection: {str(e)}") - -# For FastAPI dependency injection -def get_database_connection(): - """FastAPI dependency for getting a database connection""" - try: - cnx = create_database_connection() - yield cnx - except HTTPException: - raise # Re-raise the HTTPException to let FastAPI handle it properly - except Exception as e: - logger.error(f"Database connection error of type {type(e).__name__} with arguments: {e.args}") - logger.error(traceback.format_exc()) - raise HTTPException(500, "Unable to connect to the database") - finally: - try: - close_database_connection(cnx) - except Exception as e: - logger.error(f"Error in connection cleanup: {str(e)}") diff --git a/database_functions/functions.py b/database_functions/functions.py deleted file mode 100644 index 9d5d22ab..00000000 --- a/database_functions/functions.py +++ /dev/null @@ -1,15213 +0,0 @@ -import random -import string -import mysql.connector -from mysql.connector import errorcode -import mysql.connector.pooling -import sys -import os -import requests -import feedgenerator -import datetime -from datetime import timedelta -import time -import appdirs -import base64 -import subprocess -import psycopg -from psycopg.rows import dict_row 
-from requests.exceptions import RequestException -from fastapi import HTTPException -from mysql.connector import ProgrammingError -import feedparser -import dateutil.parser -import re -import requests -from requests.auth import HTTPBasicAuth -from urllib.parse import urlparse, urlunparse -from typing import List, Optional -import pytz -from yt_dlp import YoutubeDL -from database_functions import youtube -from database_functions import mp3_metadata -import logging -from cryptography.fernet import Fernet -from requests.exceptions import RequestException -import shutil -import tempfile -import secrets -import html - -# # Get the application root directory from the environment variable -# app_root = os.environ.get('APP_ROOT') -sys.path.append('/pinepods/'), -# Import the functions directly from app_functions.py located in the database_functions directory -from database_functions.app_functions import sync_subscription_change, get_podcast_values, check_valid_feed, sync_subscription_change_gpodder - - -def pascal_case(snake_str): - return ''.join(word.title() for word in snake_str.split('_')) - -def lowercase_keys(data): - if isinstance(data, dict): - return {k.lower(): (bool(v) if k.lower() == 'completed' else v) for k, v in data.items()} - elif isinstance(data, list): - return [lowercase_keys(item) for item in data] - return data - -def convert_bools(data, database_type): - def convert_value(k, v): - if k.lower() == 'explicit': - if database_type == 'postgresql': - return v == True - else: - return bool(v) - return v - - if isinstance(data, dict): - return {k: convert_value(k, v) for k, v in data.items()} - elif isinstance(data, list): - return [convert_bools(item, database_type) for item in data] - return data - -def capitalize_keys(data): - if isinstance(data, dict): - return {pascal_case(k): v for k, v in data.items()} - elif isinstance(data, list): - return [capitalize_keys(item) for item in data] - return data - -def normalize_keys(data, database_type): - if 
database_type == "postgresql": - # Convert keys to PascalCase - return {pascal_case(k): v for k, v in data.items()} - return data - -def get_value(result, key, default=None): - """ - Helper function to extract value from result set. - It handles both dictionaries and tuples. - """ - key_lower = key.lower() - if isinstance(result, dict): - # Handles keys returned as lowercase in PostgreSQL - return result.get(key_lower, default) - elif isinstance(result, tuple): - # Handles keys with tuple index mapping - key_map = { - "podcastid": 0, - "episodeurl": 0, - "podcastname": 0 - } - index = key_map.get(key_lower) - return result[index] if index is not None else default - return default - - - -def get_web_key(cnx, database_type): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT APIKey FROM "APIKeys" WHERE UserID = 1' - else: - query = "SELECT APIKey FROM APIKeys WHERE UserID = 1" - cursor.execute(query) - result = cursor.fetchone() - cursor.close() - - if result: - # Handle both tuple and dictionary return types - if isinstance(result, dict): - return result['apikey'] - else: - return result[0] - return None - -def add_custom_podcast(database_type, cnx, feed_url, user_id, username=None, password=None): - # Proceed to extract and use podcast details if the feed is valid - podcast_values = get_podcast_values(feed_url, user_id, username, password) - try: - feed_cutoff = 30 - result = add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff, username, password) - if not result: - raise Exception("Failed to add the podcast.") - - # Handle the tuple return value - if isinstance(result, tuple): - podcast_id = result[0] # Extract just the podcast_id - else: - podcast_id = result - - return podcast_id - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - -def add_news_feed_if_not_added(database_type, cnx): - cursor = cnx.cursor() - try: - # Get all admin users - if database_type == "postgresql": - 
cursor.execute('SELECT UserID FROM "Users" WHERE IsAdmin = TRUE') - else: # MySQL or MariaDB - cursor.execute("SELECT UserID FROM Users WHERE IsAdmin = 1") - - admin_users = cursor.fetchall() - feed_url = "https://news.pinepods.online/feed.xml" - - # Add feed for each admin user if they don't already have it - for admin in admin_users: - user_id = admin[0] - - # Check if this user already has the news feed - if database_type == "postgresql": - cursor.execute('SELECT PodcastID FROM "Podcasts" WHERE UserID = %s AND FeedURL = %s', (user_id, feed_url)) - else: # MySQL or MariaDB - cursor.execute("SELECT PodcastID FROM Podcasts WHERE UserID = %s AND FeedURL = %s", (user_id, feed_url)) - - existing_feed = cursor.fetchone() - - if existing_feed is None: - add_custom_podcast(database_type, cnx, feed_url, user_id) - cnx.commit() - - except (psycopg.ProgrammingError, mysql.connector.ProgrammingError) as e: - print(f"Error in add_news_feed_if_not_added: {e}") - cnx.rollback() - finally: - cursor.close() - - -def add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff, username=None, password=None, podcast_index_id=0): - cursor = cnx.cursor() - - # If podcast_index_id is 0, try to fetch it from the API - if podcast_index_id == 0: - api_url = os.environ.get("SEARCH_API_URL", "https://api.pinepods.online/api/search") - search_url = f"{api_url}?query={podcast_values['pod_title']}" - - try: - response = requests.get(search_url) - response.raise_for_status() - data = response.json() - - if data['status'] == 'true' and data['feeds']: - for feed in data['feeds']: - if feed['title'] == podcast_values['pod_title']: - podcast_index_id = feed['id'] - break - - if podcast_index_id == 0: - print(f"Couldn't find PodcastIndexID for {podcast_values['pod_title']}") - except Exception as e: - print(f"Error fetching PodcastIndexID: {e}") - - - try: - if database_type == "postgresql": - query = 'SELECT PodcastID, PodcastName, FeedURL FROM "Podcasts" WHERE FeedURL = %s AND UserID = 
%s' - else: - query = "SELECT PodcastID, PodcastName, FeedURL FROM Podcasts WHERE FeedURL = %s AND UserID = %s" - - cursor.execute(query, (podcast_values['pod_feed_url'], user_id)) - result = cursor.fetchone() - print(f"Existing podcast check - Query result: {result}") - print(f"Checking for feed URL: {podcast_values['pod_feed_url']}") - - if result is not None: - # Print more details for debugging - handle both dict and tuple - if isinstance(result, dict): - print(f"Matched podcast - ID: {result['podcastid']}, Name: {result['podcastname']}, URL: {result['feedurl']}") - podcast_id = result['podcastid'] - elif isinstance(result, tuple): - print(f"Matched podcast - ID: {result[0]}, Name: {result[1]}, URL: {result[2]}") - podcast_id = result[0] - else: - print(f"Unexpected result type: {type(result)}") - podcast_id = result # Fallback for scalar result - - # Add this check right before calling add_episodes in the "if result is not None:" block - if database_type == "postgresql": - episode_count_query = 'SELECT COUNT(*) FROM "Episodes" WHERE PodcastID = %s' - reset_count_query = 'UPDATE "Podcasts" SET EpisodeCount = 0 WHERE PodcastID = %s' - else: # MySQL or MariaDB - episode_count_query = "SELECT COUNT(*) FROM Episodes WHERE PodcastID = %s" - reset_count_query = "UPDATE Podcasts SET EpisodeCount = 0 WHERE PodcastID = %s" - - # Check if there are any episodes for this podcast - cursor.execute(episode_count_query, (podcast_id,)) - episode_count_result = cursor.fetchone() - - # Handle both dict and tuple for episode count result - if isinstance(episode_count_result, dict): - episode_count = episode_count_result.get('count', episode_count_result.get('COUNT(*)', 0)) - elif isinstance(episode_count_result, tuple): - episode_count = episode_count_result[0] - else: - episode_count = episode_count_result - - # If there are no episodes but the podcast has a non-zero count, reset it to 0 - if episode_count == 0: - # Get the current episode count from Podcasts table - if 
database_type == "postgresql": - podcast_count_query = 'SELECT EpisodeCount FROM "Podcasts" WHERE PodcastID = %s' - else: - podcast_count_query = "SELECT EpisodeCount FROM Podcasts WHERE PodcastID = %s" - - cursor.execute(podcast_count_query, (podcast_id,)) - podcast_count_result = cursor.fetchone() - - # Handle both dict and tuple for podcast count result - if isinstance(podcast_count_result, dict): - podcast_count = podcast_count_result.get('episodecount', podcast_count_result.get('EpisodeCount', 0)) - elif isinstance(podcast_count_result, tuple): - podcast_count = podcast_count_result[0] - else: - podcast_count = podcast_count_result - - # If the podcast has a non-zero count but no episodes, reset it - if podcast_count > 0: - print(f"Resetting episode count for podcast {podcast_id} from {podcast_count} to 0") - cursor.execute(reset_count_query, (podcast_id,)) - cnx.commit() - - # Now proceed with add_episodes as normal - first_episode_id = add_episodes(cnx, database_type, podcast_id, podcast_values['pod_feed_url'], - podcast_values['pod_artwork'], False, username=username, password=password) - print("Episodes added for existing podcast") - # Return both IDs like we do for new podcasts - return podcast_id, first_episode_id - - # Extract category names and convert to comma-separated string - categories = podcast_values['categories'] - print(f"Categories: {categories}") - - if isinstance(categories, dict): - category_list = ', '.join(categories.values()) - elif isinstance(categories, list): - category_list = ', '.join(categories) - elif isinstance(categories, str): - category_list = categories - else: - category_list = '' - - if database_type == "postgresql": - add_podcast_query = """ - INSERT INTO "Podcasts" - (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, FeedCutoffDays, Username, Password, PodcastIndexID) - VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING PodcastID - """ - 
explicit = podcast_values['pod_explicit'] - else: # MySQL or MariaDB - add_podcast_query = """ - INSERT INTO Podcasts - (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, FeedCutoffDays, Username, Password, PodcastIndexID) - VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) - """ - explicit = 1 if podcast_values['pod_explicit'] else 0 - - - print("Inserting into db") - print(podcast_values['pod_title']) - print(podcast_values['pod_artwork']) - print(podcast_values['pod_author']) - print(category_list) - print(podcast_values['pod_description']) - print(podcast_values['pod_episode_count']) - print(podcast_values['pod_feed_url']) - print(podcast_values['pod_website']) - print(explicit) - print(user_id) - try: - cursor.execute(add_podcast_query, ( - podcast_values['pod_title'], - podcast_values['pod_artwork'], - podcast_values['pod_author'], - category_list, - podcast_values['pod_description'], - 0, - podcast_values['pod_feed_url'], - podcast_values['pod_website'], - explicit, - user_id, - feed_cutoff, - username, - password, - podcast_index_id - )) - - if database_type == "postgresql": - podcast_id = cursor.fetchone() - if isinstance(podcast_id, tuple): - podcast_id = podcast_id[0] - elif isinstance(podcast_id, dict): - podcast_id = podcast_id['podcastid'] - else: # MySQL or MariaDB - cnx.commit() - podcast_id = cursor.lastrowid - - print('pre-id') - if podcast_id is None: - logging.error("No row was inserted.") - print("No row was inserted.") - cursor.close() - return False - - print("Got id") - print("Inserted into db") - - # Update UserStats table to increment PodcastsAdded count - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - print("stats table 
updated") - - # Add episodes to database - first_episode_id = add_episodes(cnx, database_type, podcast_id, podcast_values['pod_feed_url'], - podcast_values['pod_artwork'], False, username=username, password=password, websocket=False) - print("episodes added") - return podcast_id, first_episode_id - - except Exception as e: - logging.error(f"Failed to add podcast: {e}") - print(f"Failed to add podcast: {e}") - cnx.rollback() - cursor.close() - raise Exception(f"Failed to add podcast: {e}") - - except Exception as e: - print(f"Error during podcast insertion or UserStats update: {e}") - logging.error(f"Error during podcast insertion or UserStats update: {e}") - cnx.rollback() - raise - - finally: - cursor.close() - - # Return True to indicate success - return True - - -def add_person_podcast(cnx, database_type, podcast_values, user_id, username=None, password=None, podcast_index_id=0): - cursor = cnx.cursor() - - # If podcast_index_id is 0, try to fetch it from the API - if podcast_index_id == 0: - api_url = os.environ.get("SEARCH_API_URL", "https://api.pinepods.online/api/search") - search_url = f"{api_url}?query={podcast_values['pod_title']}" - - try: - response = requests.get(search_url) - response.raise_for_status() - data = response.json() - - if data['status'] == 'true' and data['feeds']: - for feed in data['feeds']: - if feed['title'] == podcast_values['pod_title']: - podcast_index_id = feed['id'] - break - - if podcast_index_id == 0: - print(f"Couldn't find PodcastIndexID for {podcast_values['pod_title']}") - except Exception as e: - print(f"Error fetching PodcastIndexID: {e}") - - - try: - # Check if the podcast already exists for the user - if database_type == "postgresql": - query = 'SELECT PodcastID FROM "Podcasts" WHERE FeedURL = %s AND UserID = %s' - else: # MySQL or MariaDB - query = "SELECT PodcastID FROM Podcasts WHERE FeedURL = %s AND UserID = %s" - - cursor.execute(query, (podcast_values['pod_feed_url'], user_id)) - result = cursor.fetchone() - 
print(f"Result: {result}") - print("Checked for existing podcast") - - if result is not None: - # Podcast already exists for the user, return False - cursor.close() - return False - - # Extract category names and convert to comma-separated string - categories = podcast_values['categories'] - print(f"Categories: {categories}") - - if isinstance(categories, dict): - category_list = ', '.join(categories.values()) - elif isinstance(categories, list): - category_list = ', '.join(categories) - elif isinstance(categories, str): - category_list = categories - else: - category_list = '' - - if database_type == "postgresql": - add_podcast_query = """ - INSERT INTO "Podcasts" - (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, Username, Password, PodcastIndexID) - VALUES (%s, %s, %s, %s, %s, 0, %s, %s, %s, %s, %s, %s, %s) RETURNING PodcastID - """ - explicit = podcast_values['pod_explicit'] - else: # MySQL or MariaDB - add_podcast_query = """ - INSERT INTO Podcasts - (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, Username, Password, PodcastIndexID) - VALUES (%s, %s, %s, %s, %s, 0, %s, %s, %s, %s, %s, %s, %s) - """ - explicit = 1 if podcast_values['pod_explicit'] else 0 - - - print("Inserting into db") - print(podcast_values['pod_title']) - print(podcast_values['pod_artwork']) - print(podcast_values['pod_author']) - print(category_list) - print(podcast_values['pod_description']) - print(podcast_values['pod_episode_count']) - print(podcast_values['pod_feed_url']) - print(podcast_values['pod_website']) - print(explicit) - print(user_id) - try: - cursor.execute(add_podcast_query, ( - podcast_values['pod_title'], - podcast_values['pod_artwork'], - podcast_values['pod_author'], - category_list, - podcast_values['pod_description'], - podcast_values['pod_feed_url'], - podcast_values['pod_website'], - explicit, - user_id, - username, - password, - 
podcast_index_id - )) - - if database_type == "postgresql": - podcast_id = cursor.fetchone() - if isinstance(podcast_id, tuple): - podcast_id = podcast_id[0] - elif isinstance(podcast_id, dict): - podcast_id = podcast_id['podcastid'] - else: # MySQL or MariaDB - cnx.commit() - podcast_id = cursor.lastrowid - - print('pre-id') - if podcast_id is None: - logging.error("No row was inserted.") - print("No row was inserted.") - cursor.close() - return False - - except Exception as e: - logging.error(f"Failed to add podcast: {e}") - print(f"Failed to add podcast: {e}") - cnx.rollback() - cursor.close() - return False - - except Exception as e: - print(f"Error during podcast insertion or UserStats update: {e}") - logging.error(f"Error during podcast insertion or UserStats update: {e}") - cnx.rollback() - raise - - finally: - cursor.close() - - # Return True to indicate success - return True - - -def add_user(cnx, database_type, user_values): - cursor = cnx.cursor() - try: - print(f"Adding user with values: {user_values}") - if database_type == "postgresql": - add_user_query = """ - INSERT INTO "Users" - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, false) - RETURNING UserID - """ - else: # MySQL or MariaDB - add_user_query = """ - INSERT INTO Users - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, 0) - """ - - cursor.execute(add_user_query, user_values) - - # Handle the user ID retrieval - if database_type == "postgresql": - result = cursor.fetchone() - if result is None: - raise Exception("Failed to create user - no ID returned") - # Print the result for debugging - print(f"Raw PostgreSQL result: {result}") - logging.debug(f"Raw PostgreSQL result: {result}") - # Handle different return types - if isinstance(result, dict): - # Try different case variations - user_id = result.get('userid') or result.get('UserID') or result.get('userId') or result.get('user_id') - else: - user_id = result[0] - if not user_id: - raise 
Exception("Failed to create user - invalid ID returned") - else: # MySQL or MariaDB - # Get the last inserted ID for MySQL - user_id = cursor.lastrowid - if not user_id: - raise Exception("Failed to create user - no ID returned from MySQL") - print(f"MySQL generated user_id: {user_id}") - - # Add user settings - settings_query = """ - INSERT INTO "UserSettings" - (UserID, Theme) - VALUES (%s, %s) - """ if database_type == "postgresql" else """ - INSERT INTO UserSettings - (UserID, Theme) - VALUES (%s, %s) - """ - cursor.execute(settings_query, (user_id, 'Nordic')) - - # Add user stats - stats_query = """ - INSERT INTO "UserStats" - (UserID) - VALUES (%s) - """ if database_type == "postgresql" else """ - INSERT INTO UserStats - (UserID) - VALUES (%s) - """ - cursor.execute(stats_query, (user_id,)) - - cnx.commit() - return user_id - except Exception as e: - cnx.rollback() - logging.error(f"Error in add_user: {str(e)}") - raise - finally: - cursor.close() - -def add_admin_user(cnx, database_type, user_values): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - add_user_query = """ - WITH inserted_user AS ( - INSERT INTO "Users" - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, TRUE) - ON CONFLICT (Username) DO NOTHING - RETURNING UserID - ) - SELECT UserID FROM inserted_user - UNION ALL - SELECT UserID FROM "Users" WHERE Username = %s - LIMIT 1 - """ - # Note: we add the username as an extra parameter here - cursor.execute(add_user_query, user_values + (user_values[1],)) - user_id = cursor.fetchone()[0] - else: # MySQL or MariaDB - add_user_query = """ - INSERT INTO Users - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, 1) - """ - cursor.execute(add_user_query, user_values) - user_id = cursor.lastrowid - - # Now add settings and stats - if database_type == "postgresql": - add_user_settings_query = """ - INSERT INTO "UserSettings" - (UserID, Theme) - VALUES (%s, %s) - """ - else: - 
add_user_settings_query = """ - INSERT INTO UserSettings - (UserID, Theme) - VALUES (%s, %s) - """ - cursor.execute(add_user_settings_query, (user_id, 'Nordic')) - - if database_type == "postgresql": - add_user_stats_query = """ - INSERT INTO "UserStats" - (UserID) - VALUES (%s) - """ - else: - add_user_stats_query = """ - INSERT INTO UserStats - (UserID) - VALUES (%s) - """ - cursor.execute(add_user_stats_query, (user_id,)) - cnx.commit() - return user_id - finally: - cursor.close() - -def add_oidc_provider(cnx, database_type, provider_values): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - add_provider_query = """ - INSERT INTO "OIDCProviders" - (ProviderName, ClientID, ClientSecret, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, - ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim, - UsernameClaim, RolesClaim, UserRole, AdminRole) - VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, - %s, %s, %s) - RETURNING ProviderID - """ - else: # MySQL - add_provider_query = """ - INSERT INTO OIDCProviders - (ProviderName, ClientID, ClientSecret, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, - ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim, - UsernameClaim, RolesClaim, UserRole, AdminRole) - VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, - %s, %s, %s) - """ - cursor.execute(add_provider_query, provider_values) - - if database_type == "postgresql": - result = cursor.fetchone() - if isinstance(result, dict): - provider_id = result.get('providerid') or result.get('ProviderID') or result.get('provider_id') - else: - provider_id = result[0] - else: - provider_id = cursor.lastrowid - - cnx.commit() - return provider_id - except Exception as e: - cnx.rollback() - logging.error(f"Error in add_oidc_provider: {str(e)}") - raise - finally: - cursor.close() - -def remove_oidc_provider(cnx, database_type, provider_id): - cursor = cnx.cursor() - try: - if database_type == 
"postgresql": - delete_query = """ - DELETE FROM "OIDCProviders" - WHERE ProviderID = %s - """ - else: - delete_query = """ - DELETE FROM OIDCProviders - WHERE ProviderID = %s - """ - cursor.execute(delete_query, (provider_id,)) - rows_affected = cursor.rowcount - cnx.commit() - return rows_affected > 0 - except Exception as e: - cnx.rollback() - logging.error(f"Error in remove_oidc_provider: {str(e)}") - raise - finally: - cursor.close() - -def list_oidc_providers(cnx, database_type): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - list_query = """ - SELECT ProviderID, ProviderName, ClientID, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor, - ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim, - RolesClaim, UserRole, AdminRole, Enabled, Created, Modified - FROM "OIDCProviders" - ORDER BY ProviderName - """ - else: - list_query = """ - SELECT ProviderID, ProviderName, ClientID, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor, - ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim, - RolesClaim, UserRole, AdminRole, Enabled, Created, Modified - FROM OIDCProviders - ORDER BY ProviderName - """ - cursor.execute(list_query) - if database_type == "postgresql": - results = cursor.fetchall() - providers = [] - for row in results: - if isinstance(row, dict): - # For dict results, normalize the keys - normalized = {} - for key, value in row.items(): - normalized_key = key.lower() - if normalized_key == "providerid": - normalized["provider_id"] = value - elif normalized_key == "providername": - normalized["provider_name"] = value - elif normalized_key == "clientid": - normalized["client_id"] = value - elif normalized_key == "authorizationurl": - normalized["authorization_url"] = value - elif normalized_key == "tokenurl": - normalized["token_url"] = value - elif normalized_key == "userinfourl": - normalized["user_info_url"] = value - elif normalized_key == "buttontext": - 
normalized["button_text"] = value - elif normalized_key == "buttoncolor": - normalized["button_color"] = value - elif normalized_key == "buttontextcolor": - normalized["button_text_color"] = value - elif normalized_key == "iconsvg": - normalized["icon_svg"] = value - elif normalized_key == "nameclaim": - normalized["name_claim"] = value - elif normalized_key == "emailclaim": - normalized["email_claim"] = value - elif normalized_key == "usernameclaim": - normalized["username_claim"] = value - elif normalized_key == "rolesclaim": - normalized["roles_claim"] = value - elif normalized_key == "userrole": - normalized["user_role"] = value - elif normalized_key == "adminrole": - normalized["admin_role"] = value - else: - normalized[normalized_key] = value - providers.append(normalized) - else: - # For tuple results, use the existing mapping - providers.append({ - 'provider_id': row[0], - 'provider_name': row[1], - 'client_id': row[2], - 'authorization_url': row[3], - 'token_url': row[4], - 'user_info_url': row[5], - 'button_text': row[6], - 'scope': row[7], - 'button_color': row[8], - 'button_text_color': row[9], - 'icon_svg': row[10], - 'name_claim': row[11], - 'email_claim': row[12], - 'username_claim': row[13], - 'roles_claim': row[14], - 'user_role': row[15], - 'admin_role': row[16], - 'enabled': row[17], - 'created': row[18], - 'modified': row[19] - }) - else: - columns = [col[0] for col in cursor.description] - results = [dict(zip(columns, row)) for row in cursor.fetchall()] - # Normalize MySQL results the same way - providers = [] - for row in results: - normalized = {} - for key, value in row.items(): - normalized_key = key.lower() - if normalized_key == "providerid": - normalized["provider_id"] = value - elif normalized_key == "providername": - normalized["provider_name"] = value - elif normalized_key == "clientid": - normalized["client_id"] = value - elif normalized_key == "authorizationurl": - normalized["authorization_url"] = value - elif normalized_key == 
"tokenurl": - normalized["token_url"] = value - elif normalized_key == "userinfourl": - normalized["user_info_url"] = value - elif normalized_key == "buttontext": - normalized["button_text"] = value - elif normalized_key == "buttoncolor": - normalized["button_color"] = value - elif normalized_key == "buttontextcolor": - normalized["button_text_color"] = value - elif normalized_key == "iconsvg": - normalized["icon_svg"] = value - elif normalized_key == "nameclaim": - normalized["name_claim"] = value - elif normalized_key == "emailclaim": - normalized["email_claim"] = value - elif normalized_key == "usernameclaim": - normalized["username_claim"] = value - elif normalized_key == "rolesclaim": - normalized["roles_claim"] = value - elif normalized_key == "userrole": - normalized["user_role"] = value - elif normalized_key == "adminrole": - normalized["admin_role"] = value - elif normalized_key == "enabled": - # Convert MySQL TINYINT to boolean - normalized["enabled"] = bool(value) - else: - normalized[normalized_key] = value - providers.append(normalized) - return providers - except Exception as e: - logging.error(f"Error in list_oidc_providers: {str(e)}") - raise - finally: - cursor.close() - -def get_public_oidc_providers(cnx, database_type): - """Get minimal provider info needed for login buttons.""" - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = ''' - SELECT - ProviderID, - ProviderName, - ClientID, - AuthorizationURL, - Scope, - ButtonColor, - ButtonText, - ButtonTextColor, - IconSVG - FROM "OIDCProviders" - WHERE Enabled = TRUE - ''' - else: - query = ''' - SELECT - ProviderID, - ProviderName, - ClientID, - AuthorizationURL, - Scope, - ButtonColor, - ButtonText, - ButtonTextColor, - IconSVG - FROM OIDCProviders - WHERE Enabled = TRUE - ''' - cursor.execute(query) - results = cursor.fetchall() - providers = [] - - for row in results: - if isinstance(row, dict): - # For dict results, normalize the keys - normalized = {} - for key, value 
in row.items(): - normalized_key = key.lower() - if normalized_key == "providerid": - normalized["provider_id"] = value - elif normalized_key == "providername": - normalized["provider_name"] = value - elif normalized_key == "clientid": - normalized["client_id"] = value - elif normalized_key == "authorizationurl": - normalized["authorization_url"] = value - elif normalized_key == "buttoncolor": - normalized["button_color"] = value - elif normalized_key == "buttontext": - normalized["button_text"] = value - elif normalized_key == "buttontextcolor": - normalized["button_text_color"] = value - elif normalized_key == "iconsvg": - normalized["icon_svg"] = value - else: - normalized[normalized_key] = value - providers.append(normalized) - else: - # For tuple results, use index-based mapping - providers.append({ - "provider_id": row[0], - "provider_name": row[1], - "client_id": row[2], - "authorization_url": row[3], - "scope": row[4], - "button_color": row[5], - "button_text": row[6], - "button_text_color": row[7], - "icon_svg": row[8] - }) - - return providers - except Exception as e: - logging.error(f"Error in get_public_oidc_providers: {str(e)}") - raise - finally: - cursor.close() - -def get_pinepods_version(): - try: - with open('/pinepods/current_version', 'r') as file: - version = file.read().strip() - if not version: - return 'dev_mode' - return version - except FileNotFoundError: - return "Version file not found." 
- except Exception as e: - return f"An error occurred: {e}" - -def get_first_episode_id(cnx, database_type, podcast_id, is_youtube=False): - print('getting first ep id') - cursor = cnx.cursor() - try: - if is_youtube: - if database_type == "postgresql": - query = 'SELECT VIDEOID FROM "YouTubeVideos" WHERE PODCASTID = %s ORDER BY PUBLISHEDAT ASC LIMIT 1' - else: # MySQL or MariaDB - query = "SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s ORDER BY PublishedAt ASC LIMIT 1" - else: - if database_type == "postgresql": - query = 'SELECT EPISODEID FROM "Episodes" WHERE PODCASTID = %s ORDER BY EPISODEPUBDATE ASC LIMIT 1' - else: # MySQL or MariaDB - query = "SELECT EpisodeID FROM Episodes WHERE PodcastID = %s ORDER BY EpisodePubDate ASC LIMIT 1" - print(f'request finish') - cursor.execute(query, (podcast_id,)) - result = cursor.fetchone() - print(f'request result {result}') - if isinstance(result, dict): - return result.get("videoid" if is_youtube else "episodeid") if result else None - elif isinstance(result, tuple): - return result[0] if result else None - else: - return None - finally: - cursor.close() - -def try_fetch_feed(url, username=None, password=None): - headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36', - 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8', - 'Accept-Language': 'en-US,en;q=0.9', - # Remove 'Accept-Encoding' to let requests handle decompression automatically - 'Connection': 'keep-alive', - 'Upgrade-Insecure-Requests': '1', - 'Sec-Fetch-Dest': 'document', - 'Sec-Fetch-Mode': 'navigate', - 'Sec-Fetch-Site': 'none', - 'Sec-Fetch-User': '?1' - } - auth = HTTPBasicAuth(username, password) if username and password else None - - # Try the original URL first - try: - response = requests.get( - url, - auth=auth, - headers=headers, - timeout=30, - allow_redirects=True - ) - response.raise_for_status() - - # Check 
content type to ensure we're getting XML or text - content_type = response.headers.get('Content-Type', '') - if 'xml' not in content_type.lower() and 'text' not in content_type.lower(): - print(f"Warning: Unexpected content type: {content_type}") - - # Get the text rather than binary content - # Try first with detected encoding, fall back to UTF-8 - try: - return response.text - except UnicodeDecodeError: - return response.content.decode('utf-8', errors='replace') - except RequestException as e: - print(f"Error fetching {url}: {str(e)}") - - # If original URL failed and starts with https, try with http - if url.startswith('https://'): - http_url = 'http://' + url[8:] # Replace https:// with http:// - print(f"Trying HTTP fallback: {http_url}") - try: - response = requests.get( - http_url, - auth=auth, - headers=headers, - timeout=30, - allow_redirects=True - ) - response.raise_for_status() - return response.content - except RequestException as e2: - print(f"Error fetching HTTP fallback {http_url}: {str(e2)}") - - # Original URL and HTTP fallback both failed - return None - -def parse_duration(duration_string: str) -> int: - # First, check if duration is in seconds (no colons) - if ':' not in duration_string: - try: - # Directly return seconds if no colon is found - return int(duration_string) - except ValueError: - print(f'Error parsing duration from pure seconds: {duration_string}') - return 0 # Return 0 or some default value in case of error - else: - # Handle HH:MM:SS format - parts = duration_string.split(':') - if len(parts) == 1: - # If there's only one part, it's in seconds - return int(parts[0]) - else: - while len(parts) < 3: - parts.insert(0, '0') # Prepend zeros if any parts are missing (ensuring HH:MM:SS format) - h, m, s = map(int, parts) - return h * 3600 + m * 60 + s - -def update_episode_count(cnx, database_type, podcast_id): - """Recalculate and update episode count for a podcast""" - cursor = cnx.cursor() - print(f'Updating episode count for 
podcast {podcast_id}') - try: - # Count both regular episodes and YouTube videos - if database_type == "postgresql": - episode_count_query = 'SELECT COUNT(*) FROM "Episodes" WHERE PodcastID = %s' - video_count_query = 'SELECT COUNT(*) FROM "YouTubeVideos" WHERE PodcastID = %s' - update_query = 'UPDATE "Podcasts" SET EpisodeCount = %s WHERE PodcastID = %s' - verify_query = 'SELECT EpisodeCount FROM "Podcasts" WHERE PodcastID = %s' - else: # MySQL or MariaDB - episode_count_query = 'SELECT COUNT(*) FROM Episodes WHERE PodcastID = %s' - video_count_query = 'SELECT COUNT(*) FROM YouTubeVideos WHERE PodcastID = %s' - update_query = "UPDATE Podcasts SET EpisodeCount = %s WHERE PodcastID = %s" - verify_query = 'SELECT EpisodeCount FROM Podcasts WHERE PodcastID = %s' - - # Get episode count - cursor.execute(episode_count_query, (podcast_id,)) - episode_result = cursor.fetchone() - episode_count = 0 - if episode_result: - if isinstance(episode_result, tuple): - episode_count = episode_result[0] - elif isinstance(episode_result, dict): - episode_count = episode_result["count"] - - # Get video count - cursor.execute(video_count_query, (podcast_id,)) - video_result = cursor.fetchone() - video_count = 0 - if video_result: - if isinstance(video_result, tuple): - video_count = video_result[0] - elif isinstance(video_result, dict): - video_count = video_result["count"] - - # Total count - total_count = episode_count + video_count - # Update total count - cursor.execute(update_query, (total_count, podcast_id)) - # Verify the update - cursor.execute(verify_query, (podcast_id,)) - verify_result = cursor.fetchone() - final_count = 0 - if verify_result: - if isinstance(verify_result, tuple): - final_count = verify_result[0] - elif isinstance(verify_result, dict): - final_count = verify_result["episodecount"] - - cnx.commit() - - except Exception as e: - print(f'Error updating content count for podcast {podcast_id}: {str(e)}') - cnx.rollback() - raise - finally: - cursor.close() - -def 
add_episodes(cnx, database_type, podcast_id, feed_url, artwork_url, auto_download, username=None, password=None, websocket=False): - import feedparser - first_episode_id = None - - # Try to fetch the feed - content = try_fetch_feed(feed_url, username, password) - - if content is None: - # If the original URL fails, try switching between www and non-www - parsed_url = urlparse(feed_url) - if parsed_url.netloc.startswith('www.'): - alternate_netloc = parsed_url.netloc[4:] - else: - alternate_netloc = 'www.' + parsed_url.netloc - - alternate_url = urlunparse(parsed_url._replace(netloc=alternate_netloc)) - content = try_fetch_feed(alternate_url, username, password) - - if content is None: - raise ValueError(f"Failed to fetch feed from both {feed_url} and its www/non-www alternative") - - episode_dump = feedparser.parse(content) - - cursor = cnx.cursor() - - new_episodes = [] - - for entry in episode_dump.entries: - # Check necessary fields are present - if not all(hasattr(entry, attr) for attr in ["title", "summary", "enclosures"]): - continue - - # Title is required - if missing, skip this episode - if not hasattr(entry, 'title') or not entry.title: - continue - - parsed_title = entry.title - - # Description - use placeholder if missing - parsed_description = entry.get('content', [{}])[0].get('value') or entry.get('summary') or "No description available" - - # Audio URL can be empty (non-audio posts are allowed) - parsed_audio_url = entry.enclosures[0].href if entry.enclosures else "" - - # Release date - use current time as fallback if parsing fails - try: - parsed_release_datetime = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d %H:%M:%S") - except (AttributeError, ValueError): - parsed_release_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") - - # Artwork - use placeholders based on feed name/episode number - parsed_artwork_url = (entry.get('itunes_image', {}).get('href') or - getattr(entry, 'image', {}).get('href') or - artwork_url or # 
This is the podcast's default artwork - '/static/assets/default-episode.png') # Final fallback artwork - - # Duration parsing - def estimate_duration_from_file_size(file_size_bytes, bitrate_kbps=128): - """ - Estimate duration in seconds based on file size and bitrate. - - Args: - file_size_bytes (int): Size of the media file in bytes - bitrate_kbps (int): Bitrate in kilobits per second (default: 128) - - Returns: - int: Estimated duration in seconds - """ - bytes_per_second = (bitrate_kbps * 1000) / 8 # Convert kbps to bytes per second - return int(file_size_bytes / bytes_per_second) - - # Duration parsing section for the add_episodes function - parsed_duration = 0 - duration_str = getattr(entry, 'itunes_duration', '') - if ':' in duration_str: - # If duration contains ":", then process as HH:MM:SS or MM:SS - time_parts = list(map(int, duration_str.split(':'))) - while len(time_parts) < 3: - time_parts.insert(0, 0) # Pad missing values with zeros - - # Fix for handling more than 3 time parts - if len(time_parts) > 3: - print(f"Warning: Duration string '{duration_str}' has more than 3 parts, using first 3") - h, m, s = time_parts[0], time_parts[1], time_parts[2] - else: - h, m, s = time_parts - - parsed_duration = h * 3600 + m * 60 + s - elif duration_str.isdigit(): - # If duration is all digits (no ":"), treat as seconds directly - parsed_duration = int(duration_str) - elif hasattr(entry, 'itunes_duration_seconds'): - # Additional format as fallback, if explicitly provided as seconds - parsed_duration = int(entry.itunes_duration_seconds) - elif hasattr(entry, 'duration'): - # Other specified duration formats (assume they are in correct format or seconds) - parsed_duration = parse_duration(entry.duration) - elif hasattr(entry, 'length'): - # If duration not specified but length is, use length (assuming it's in seconds) - parsed_duration = int(entry.length) - # Check for enclosure length attribute as a last resort - elif entry.enclosures and len(entry.enclosures) > 
0: - enclosure = entry.enclosures[0] - if hasattr(enclosure, 'length') and enclosure.length: - try: - file_size = int(enclosure.length) - # Only estimate if the size seems reasonable (to avoid errors) - if file_size > 1000000: # Only consider files larger than 1MB - parsed_duration = estimate_duration_from_file_size(file_size) - # print(f"Estimated duration from file size {file_size} bytes: {parsed_duration} seconds") - except (ValueError, TypeError) as e: - print(f"Error parsing enclosure length: {e}") - - - # Check for existing episode - if database_type == "postgresql": - episode_check_query = 'SELECT * FROM "Episodes" WHERE PodcastID = %s AND EpisodeTitle = %s' - else: # MySQL or MariaDB - episode_check_query = "SELECT * FROM Episodes WHERE PodcastID = %s AND EpisodeTitle = %s" - - cursor.execute(episode_check_query, (podcast_id, parsed_title)) - if cursor.fetchone(): - continue # Episode already exists - print("inserting now") - # Insert the new episode - if database_type == "postgresql": - episode_insert_query = """ - INSERT INTO "Episodes" - (PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) - VALUES (%s, %s, %s, %s, %s, %s, %s) - """ - else: # MySQL or MariaDB - episode_insert_query = """ - INSERT INTO Episodes - (PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) - VALUES (%s, %s, %s, %s, %s, %s, %s) - """ - - cursor.execute(episode_insert_query, (podcast_id, parsed_title, parsed_description, parsed_audio_url, parsed_artwork_url, parsed_release_datetime, parsed_duration)) - print('episodes inserted') - update_episode_count(cnx, database_type, podcast_id) - # Get the EpisodeID for the newly added episode - if cursor.rowcount > 0: - print(f"Added episode '{parsed_title}'") - check_and_send_notification(cnx, database_type, podcast_id, parsed_title) - if websocket: - # Get the episode ID using a SELECT query right after insert - if database_type == 
"postgresql": - cursor.execute(""" - SELECT EpisodeID FROM "Episodes" - WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeURL = %s - """, (podcast_id, parsed_title, parsed_audio_url)) - else: - cursor.execute(""" - SELECT EpisodeID FROM Episodes - WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeURL = %s - """, (podcast_id, parsed_title, parsed_audio_url)) - - episode_id = cursor.fetchone() - if isinstance(episode_id, dict): - episode_id = episode_id.get('episodeid') - elif isinstance(episode_id, tuple): - episode_id = episode_id[0] - - episode_data = { - "episode_id": episode_id, - "podcast_id": podcast_id, - "title": parsed_title, - "description": parsed_description, - "audio_url": parsed_audio_url, - "artwork_url": parsed_artwork_url, - "release_datetime": parsed_release_datetime, - "duration": parsed_duration, - "completed": False # Assuming this is the default for new episodes - } - new_episodes.append(episode_data) - if auto_download: # Check if auto-download is enabled - episode_id = get_episode_id(cnx, database_type, podcast_id, parsed_title, parsed_audio_url) - - user_id = get_user_id_from_pod_id(cnx, database_type, podcast_id) - # Call your download function here - download_podcast(cnx, database_type, episode_id, user_id) - - cnx.commit() - - # Now, retrieve the first episode ID - if not websocket and first_episode_id is None: - print(f'getting first id pre') - first_episode_id = get_first_episode_id(cnx, database_type, podcast_id) - print(f'first result {first_episode_id}') - if websocket: - return new_episodes - return first_episode_id - - - - -def check_existing_channel_subscription(cnx, database_type: str, channel_id: str, user_id: int) -> Optional[int]: - """Check if user is already subscribed to this channel""" - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT PodcastID FROM "Podcasts" - WHERE WebsiteURL = %s AND UserID = %s - """ - else: # MariaDB - query = """ - SELECT PodcastID FROM Podcasts - 
WHERE WebsiteURL = %s AND UserID = %s - """ - - cursor.execute(query, (f"https://www.youtube.com/channel/{channel_id}", user_id)) - result = cursor.fetchone() - return result[0] if result else None - except Exception as e: - raise e - -def add_youtube_channel(cnx, database_type: str, channel_info: dict, user_id: int, feed_cutoff: int) -> int: - """Add YouTube channel to Podcasts table""" - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - INSERT INTO "Podcasts" ( - PodcastName, FeedURL, ArtworkURL, Author, Description, - WebsiteURL, UserID, IsYouTubeChannel, Categories, FeedCutoffDays - ) VALUES (%s, %s, %s, %s, %s, %s, %s, TRUE, %s, %s) - RETURNING PodcastID - """ - else: # MariaDB - query = """ - INSERT INTO Podcasts ( - PodcastName, FeedURL, ArtworkURL, Author, Description, - WebsiteURL, UserID, IsYouTubeChannel, Categories, FeedCutoffDays - ) VALUES (%s, %s, %s, %s, %s, %s, %s, 1, %s, %s) - """ - - values = ( - channel_info['name'], - f"https://www.youtube.com/channel/{channel_info['channel_id']}", - channel_info['thumbnail_url'], - channel_info['name'], - channel_info['description'], - f"https://www.youtube.com/channel/{channel_info['channel_id']}", - user_id, - "", - feed_cutoff - ) - - cursor.execute(query, values) - if database_type == "postgresql": - result = cursor.fetchone() - if result is None: - raise ValueError("No result returned from insert") - # Handle both tuple and dict return types - if isinstance(result, dict): - podcast_id = result.get('podcastid') - if podcast_id is None: - raise ValueError("No podcast ID in result dict") - else: # it's a tuple - podcast_id = result[0] - cnx.commit() # Add this line for PostgreSQL - else: # MariaDB - podcast_id = cursor.lastrowid - cnx.commit() - return podcast_id - except Exception as e: - print(f"Error in add_youtube_channel: {str(e)}") - cnx.rollback() - raise e - -def add_youtube_videos(cnx, database_type: str, podcast_id: int, videos: list): - """Add YouTube videos to 
def cleanup_old_youtube_videos(cnx, database_type):
    """Periodically clean up old YouTube videos for all channels.

    Scans every podcast flagged as a YouTube channel with a positive
    FeedCutoffDays setting and delegates per-channel deletion to
    remove_old_youtube_videos().

    Args:
        cnx: open database connection (psycopg or mysql.connector).
        database_type: "postgresql" or anything else for MySQL/MariaDB.

    Raises:
        Re-raises any exception after logging it.
    """
    import logging

    logger = logging.getLogger(__name__)
    cursor = cnx.cursor()

    try:
        # Get all YouTube channels and their cutoff settings
        if database_type == "postgresql":
            query = """
                SELECT PodcastID, FeedCutoffDays
                FROM "Podcasts"
                WHERE IsYouTubeChannel = TRUE AND FeedCutoffDays > 0
            """
        else:
            query = """
                SELECT PodcastID, FeedCutoffDays
                FROM Podcasts
                WHERE IsYouTubeChannel = TRUE AND FeedCutoffDays > 0
            """

        cursor.execute(query)
        channels = cursor.fetchall()

        for channel in channels:
            # Rows may be tuples or dicts depending on the driver/row factory;
            # PostgreSQL dict rows use lowercase keys, MariaDB preserves case.
            podcast_id = channel[0] if isinstance(channel, tuple) else channel["podcastid"] if database_type == "postgresql" else channel["PodcastID"]
            feed_cutoff = channel[1] if isinstance(channel, tuple) else channel["feedcutoffdays"] if database_type == "postgresql" else channel["FeedCutoffDays"]

            # FIX: use datetime.timedelta explicitly — this module imports the
            # datetime *module* (see datetime.datetime.now usage), so a bare
            # `timedelta` is not guaranteed to be in scope.
            cutoff_date = datetime.datetime.now(datetime.timezone.utc) - datetime.timedelta(days=feed_cutoff)
            logger.info(f"Cleaning up channel {podcast_id} with cutoff {feed_cutoff} days")

            remove_old_youtube_videos(cnx, database_type, podcast_id, cutoff_date)

    except Exception as e:
        logger.error(f"Error during YouTube cleanup: {str(e)}")
        raise e
    finally:
        cursor.close()

def remove_old_youtube_videos(cnx, database_type: str, podcast_id: int, cutoff_date: datetime.datetime):
    """Remove YouTube videos older than cutoff date and their associated files.

    Deletes the downloaded MP3 files for each stale video, then removes every
    database reference (playlists, history, downloads, saves, queue) before
    deleting the video rows themselves, and finally refreshes the channel's
    episode count. Commits on success, rolls back on any error.

    Args:
        cnx: open database connection.
        database_type: "postgresql" or MySQL/MariaDB.
        podcast_id: channel whose videos are being pruned.
        cutoff_date: videos published before this datetime are removed.
    """
    import os
    import logging

    logger = logging.getLogger(__name__)
    cursor = cnx.cursor()

    try:
        # First, get all videos older than cutoff date
        if database_type == "postgresql":
            query = """
                SELECT VideoID, YouTubeVideoID, VideoURL
                FROM "YouTubeVideos"
                WHERE PodcastID = %s AND PublishedAt < %s
            """
        else:
            query = """
                SELECT VideoID, YouTubeVideoID, VideoURL
                FROM YouTubeVideos
                WHERE PodcastID = %s AND PublishedAt < %s
            """

        cursor.execute(query, (podcast_id, cutoff_date))
        old_videos = cursor.fetchall()

        if not old_videos:
            logger.info(f"No videos to remove for podcast {podcast_id}")
            return

        # Extract the VideoID list for database cleanup
        video_ids = []
        youtube_video_ids = []

        for video in old_videos:
            if isinstance(video, tuple):
                video_id, youtube_video_id, _ = video
            else:
                if database_type == "postgresql":
                    video_id = video["videoid"]
                    youtube_video_id = video["youtubevideoid"]
                else:
                    video_id = video["VideoID"]
                    youtube_video_id = video["YouTubeVideoID"]

            video_ids.append(video_id)
            youtube_video_ids.append(youtube_video_id)

            # Delete the MP3 file
            file_paths = [
                f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3",
                f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3.mp3"  # In case of double extension
            ]

            for file_path in file_paths:
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        logger.info(f"Deleted file: {file_path}")
                    except Exception as e:
                        # Best effort: a stuck file must not abort the DB cleanup.
                        logger.error(f"Failed to delete file {file_path}: {str(e)}")

        # Now clean up all references to these videos in other tables
        if video_ids:  # Only proceed if we have videos to delete
            # Create placeholders for the IN clause
            placeholders = ','.join(['%s'] * len(video_ids))

            if database_type == "postgresql":
                # Delete from all related YouTube video tables
                delete_playlist_contents = f'DELETE FROM "PlaylistContents" WHERE VideoID IN ({placeholders})'
                delete_history = f'DELETE FROM "UserVideoHistory" WHERE VideoID IN ({placeholders})'
                delete_downloaded = f'DELETE FROM "DownloadedVideos" WHERE VideoID IN ({placeholders})'
                delete_saved = f'DELETE FROM "SavedVideos" WHERE VideoID IN ({placeholders})'
                delete_queue = f'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN ({placeholders}) AND is_youtube = true'
                delete_videos = f'DELETE FROM "YouTubeVideos" WHERE VideoID IN ({placeholders})'
            else:
                # Delete from all related YouTube video tables
                delete_playlist_contents = f'DELETE FROM PlaylistContents WHERE VideoID IN ({placeholders})'
                delete_history = f'DELETE FROM UserVideoHistory WHERE VideoID IN ({placeholders})'
                delete_downloaded = f'DELETE FROM DownloadedVideos WHERE VideoID IN ({placeholders})'
                delete_saved = f'DELETE FROM SavedVideos WHERE VideoID IN ({placeholders})'
                delete_queue = f'DELETE FROM EpisodeQueue WHERE EpisodeID IN ({placeholders}) AND is_youtube = 1'
                delete_videos = f'DELETE FROM YouTubeVideos WHERE VideoID IN ({placeholders})'

            # Execute all deletion statements
            cursor.execute(delete_playlist_contents, video_ids)
            logger.info(f"Deleted playlist content references for {cursor.rowcount} videos")

            cursor.execute(delete_history, video_ids)
            logger.info(f"Deleted history entries for {cursor.rowcount} videos")

            cursor.execute(delete_downloaded, video_ids)
            logger.info(f"Deleted downloaded entries for {cursor.rowcount} videos")

            cursor.execute(delete_saved, video_ids)
            logger.info(f"Deleted saved entries for {cursor.rowcount} videos")

            cursor.execute(delete_queue, video_ids)
            logger.info(f"Deleted queue entries for {cursor.rowcount} videos")

            cursor.execute(delete_videos, video_ids)
            logger.info(f"Deleted {cursor.rowcount} videos from YouTubeVideos table")

            # Update episode count
            update_episode_count(cnx, database_type, podcast_id)

        cnx.commit()
        logger.info(f"Successfully removed {len(video_ids)} old videos and all references for podcast {podcast_id}")

    except Exception as e:
        cnx.rollback()
        logger.error(f"Error removing old YouTube videos for podcast {podcast_id}: {str(e)}")
        raise e
    finally:
        cursor.close()

def add_people_episodes(cnx, database_type, person_id: int, podcast_id: int, feed_url: str):
    """Sync a person's episodes from an RSS feed into PeopleEpisodes.

    Parses the feed, inserts any episodes not already present for this
    (person, podcast) pair, and prunes previously-stored episodes that have
    dropped out of the feed and are older than 30 days.

    Args:
        cnx: open database connection.
        database_type: "postgresql" or MySQL/MariaDB.
        person_id: People table ID the episodes belong to.
        podcast_id: podcast the feed represents.
        feed_url: RSS feed URL to parse.

    Raises:
        Re-raises any exception after rolling back and logging.
    """
    import feedparser
    import dateutil.parser

    # FIX: create the cursor before the try block — previously a failure in
    # feedparser.parse() reached `finally: cursor.close()` with `cursor`
    # undefined, raising NameError and masking the real error.
    cursor = cnx.cursor()
    try:
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3',
            'Accept-Language': 'en-US,en;q=0.9'
        }
        content = feedparser.parse(feed_url, request_headers=headers)

        # Start a transaction
        if database_type == "postgresql":
            cursor.execute("BEGIN")

        # Get existing episode IDs before processing
        if database_type == "postgresql":
            existing_query = """
                SELECT EpisodeID FROM "PeopleEpisodes"
                WHERE PersonID = %s::integer
                AND PodcastID = %s::integer
            """
        else:
            existing_query = """
                SELECT EpisodeID FROM PeopleEpisodes
                WHERE PersonID = %s
                AND PodcastID = %s
            """

        cursor.execute(existing_query, (person_id, podcast_id))
        existing_episodes = {row[0] for row in cursor.fetchall()}
        processed_episodes = set()

        for entry in content.entries:
            if not all(hasattr(entry, attr) for attr in ["title", "summary"]):
                continue

            # Extract episode information using more robust parsing
            parsed_title = entry.title
            parsed_description = entry.get('content', [{}])[0].get('value', entry.summary)

            # Get audio URL from enclosures
            parsed_audio_url = ""
            for enclosure in entry.get('enclosures', []):
                if enclosure.get('type', '').startswith('audio/'):
                    parsed_audio_url = enclosure.get('href', '')
                    break

            if not parsed_audio_url:
                continue

            # Parse publish date
            try:
                parsed_release_datetime = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d %H:%M:%S")
            except (AttributeError, ValueError):
                # FIX: was `datetime.now()` — this module imports the datetime
                # *module*, so the class must be qualified as datetime.datetime.
                parsed_release_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

            # Get artwork URL with fallbacks
            parsed_artwork_url = (entry.get('itunes_image', {}).get('href') or
                                  getattr(entry, 'image', {}).get('href'))

            # Duration parsing with multiple fallbacks
            parsed_duration = 0
            duration_str = getattr(entry, 'itunes_duration', '')
            if ':' in duration_str:
                # HH:MM:SS / MM:SS / SS — left-pad missing components with 0
                time_parts = list(map(int, duration_str.split(':')))
                while len(time_parts) < 3:
                    time_parts.insert(0, 0)
                h, m, s = time_parts
                parsed_duration = h * 3600 + m * 60 + s
            elif duration_str.isdigit():
                parsed_duration = int(duration_str)
            elif hasattr(entry, 'itunes_duration_seconds'):
                parsed_duration = int(entry.itunes_duration_seconds)
            elif hasattr(entry, 'duration'):
                parsed_duration = parse_duration(entry.duration)
            elif hasattr(entry, 'length'):
                parsed_duration = int(entry.length)

            try:
                # Check for existing episode
                if database_type == "postgresql":
                    episode_check_query = """
                        SELECT EpisodeID FROM "PeopleEpisodes"
                        WHERE PersonID = %s::integer
                        AND PodcastID = %s::integer
                        AND EpisodeURL = %s
                    """
                else:
                    episode_check_query = """
                        SELECT EpisodeID FROM PeopleEpisodes
                        WHERE PersonID = %s
                        AND PodcastID = %s
                        AND EpisodeURL = %s
                    """

                cursor.execute(episode_check_query, (person_id, podcast_id, parsed_audio_url))
                episode_result = cursor.fetchone()

                if episode_result:
                    episode_id = episode_result[0]
                    processed_episodes.add(episode_id)
                    continue

                # Insert new episode
                if database_type == "postgresql":
                    insert_query = """
                        INSERT INTO "PeopleEpisodes"
                        (PersonID, PodcastID, EpisodeTitle, EpisodeDescription,
                        EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                        VALUES (%s::integer, %s::integer, %s, %s, %s, %s, %s, %s)
                        RETURNING EpisodeID
                    """
                else:
                    insert_query = """
                        INSERT INTO PeopleEpisodes
                        (PersonID, PodcastID, EpisodeTitle, EpisodeDescription,
                        EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    """

                cursor.execute(insert_query, (
                    person_id,
                    podcast_id,
                    parsed_title,
                    parsed_description,
                    parsed_audio_url,
                    parsed_artwork_url,
                    parsed_release_datetime,
                    parsed_duration
                ))

                # Get the ID of the newly inserted episode
                if database_type == "postgresql":
                    new_episode_id = cursor.fetchone()[0]
                else:
                    cursor.execute('SELECT LAST_INSERT_ID()')
                    new_episode_id = cursor.fetchone()[0]

                processed_episodes.add(new_episode_id)

            except Exception as e:
                # Deliberate best-effort: one bad entry must not abort the feed.
                logging.debug(f"Skipping episode '{parsed_title}' during person podcast import - {str(e)}")
                continue

        # Clean up old episodes that disappeared from the feed (30-day grace)
        episodes_to_delete = existing_episodes - processed_episodes
        if episodes_to_delete:
            if database_type == "postgresql":
                delete_query = """
                    DELETE FROM "PeopleEpisodes"
                    WHERE PersonID = %s::integer
                    AND PodcastID = %s::integer
                    AND EpisodeID = ANY(%s)
                    AND EpisodePubDate < NOW() - INTERVAL '30 days'
                """
                cursor.execute(delete_query, (person_id, podcast_id, list(episodes_to_delete)))
            else:
                # FIX: removed redundant inner `if episodes_to_delete:` — the
                # outer guard already ensures the set is non-empty.
                placeholders = ','.join(['%s'] * len(episodes_to_delete))
                delete_query = f"""
                    DELETE FROM PeopleEpisodes
                    WHERE PersonID = %s
                    AND PodcastID = %s
                    AND EpisodeID IN ({placeholders})
                    AND EpisodePubDate < DATE_SUB(NOW(), INTERVAL 30 DAY)
                """
                cursor.execute(delete_query, (person_id, podcast_id) + tuple(episodes_to_delete))

        cnx.commit()

    except Exception as e:
        if database_type == "postgresql":
            cursor.execute("ROLLBACK")
        else:
            cnx.rollback()
        logging.error(f"Error processing feed {feed_url}: {str(e)}")
        raise

    finally:
        cursor.close()

def remove_youtube_channel_by_url(cnx, database_type, channel_name, channel_url, user_id):
    """Remove a user's YouTube channel, looked up by name + feed URL.

    Resolves the PodcastID, deletes the downloaded MP3s for each of its
    videos, removes every related row (playlists, history, downloads, saves,
    queue, the videos, the podcast row), and decrements the user's
    PodcastsAdded stat. Commits on success, rolls back and re-raises on error.

    Raises:
        ValueError: if no matching YouTube channel exists for this user.
    """
    cursor = cnx.cursor()
    print('got to remove youtube channel')
    try:
        # Get the PodcastID first
        if database_type == "postgresql":
            select_podcast_id = '''
                SELECT PodcastID
                FROM "Podcasts"
                WHERE PodcastName = %s
                AND FeedURL = %s
                AND UserID = %s
                AND IsYouTubeChannel = TRUE
            '''
        else:  # MySQL or MariaDB
            select_podcast_id = '''
                SELECT PodcastID
                FROM Podcasts
                WHERE PodcastName = %s
                AND FeedURL = %s
                AND UserID = %s
                AND IsYouTubeChannel = TRUE
            '''
        cursor.execute(select_podcast_id, (channel_name, channel_url, user_id))
        result = cursor.fetchone()
        if result:
            podcast_id = result[0] if not isinstance(result, dict) else result.get('podcastid')
        else:
            raise ValueError(f"No YouTube channel found with name {channel_name}")

        # Get all video IDs for the podcast so we can delete the files
        if database_type == "postgresql":
            get_video_ids_query = 'SELECT YouTubeVideoID FROM "YouTubeVideos" WHERE PodcastID = %s'
        else:  # MySQL or MariaDB
            get_video_ids_query = "SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = %s"

        cursor.execute(get_video_ids_query, (podcast_id,))
        video_ids = cursor.fetchall()

        # Delete the MP3 files for each video
        for video_id in video_ids:
            if isinstance(video_id, tuple):
                video_id_str = video_id[0]
            else:  # dict
                video_id_str = video_id["youtubevideoid"]

            # Delete the MP3 file
            file_paths = [
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3",
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3.mp3"  # In case of double extension
            ]

            for file_path in file_paths:
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        print(f"Deleted file: {file_path}")
                    except Exception as e:
                        print(f"Failed to delete file {file_path}: {str(e)}")

        # Delete related data - now including all tables
        if database_type == "postgresql":
            delete_queries = [
                ('DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "UserVideoHistory" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "DownloadedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "SavedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "YouTubeVideos" WHERE PodcastID = %s', (podcast_id,)),
                ('DELETE FROM "Podcasts" WHERE PodcastID = %s AND IsYouTubeChannel = TRUE', (podcast_id,))
            ]
        else:  # MySQL or MariaDB
            delete_queries = [
                ("DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM YouTubeVideos WHERE PodcastID = %s", (podcast_id,)),
                ("DELETE FROM Podcasts WHERE PodcastID = %s AND IsYouTubeChannel = TRUE", (podcast_id,))
            ]

        for query, params in delete_queries:
            cursor.execute(query, params)

        # Update UserStats table
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE UserStats SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s"
        cursor.execute(query, (user_id,))

        cnx.commit()
    except (psycopg.Error, mysql.connector.Error) as err:
        print(f"Database Error: {err}")
        cnx.rollback()
        raise
    except Exception as e:
        print(f"General Error in remove_youtube_channel_by_url: {e}")
        cnx.rollback()
        raise
    finally:
        cursor.close()

def remove_podcast(cnx, database_type, podcast_name, podcast_url, user_id):
    """Remove a podcast (looked up by name + feed URL) and all its data.

    Handles the special initialization-added news feed separately (toggling
    AppSettings.NewsFeedSubscribed and, on MySQL, temporarily disabling
    foreign key checks). Decrements the user's PodcastsAdded stat. Commits on
    success, rolls back and re-raises on error.

    Raises:
        ValueError: if no matching podcast exists for this user.
    """
    cursor = cnx.cursor()
    print('got to remove')
    try:
        # Get the PodcastID first
        if database_type == "postgresql":
            select_podcast_id = 'SELECT PodcastID FROM "Podcasts" WHERE PodcastName = %s AND FeedURL = %s AND UserID = %s'
        else:  # MySQL or MariaDB
            select_podcast_id = "SELECT PodcastID FROM Podcasts WHERE PodcastName = %s AND FeedURL = %s AND UserID = %s"

        cursor.execute(select_podcast_id, (podcast_name, podcast_url, user_id))
        result = cursor.fetchone()

        if result:
            podcast_id = result[0] if not isinstance(result, dict) else result.get('podcastid')
        else:
            raise ValueError(f"No podcast found with name {podcast_name}")

        # Special handling for initialization-added feeds
        if podcast_url == "https://news.pinepods.online/feed.xml":
            # First, delete all related entries manually to avoid foreign key issues
            if database_type == "postgresql":
                queries = [
                    # DELETE FROM PLAYLIST CONTENTS - Add this first!
                    'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
                    'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
                    'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
                    'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
                    'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
                    'DELETE FROM "Episodes" WHERE PodcastID = %s',
                    'DELETE FROM "Podcasts" WHERE PodcastID = %s',
                    'UPDATE "AppSettings" SET NewsFeedSubscribed = FALSE'
                ]
            else:  # MySQL or MariaDB
                queries = [
                    # DELETE FROM PLAYLIST CONTENTS - Add this first!
                    "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
                    "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
                    "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
                    "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
                    "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
                    "SET FOREIGN_KEY_CHECKS = 0",
                    "DELETE FROM Episodes WHERE PodcastID = %s",
                    "DELETE FROM Podcasts WHERE PodcastID = %s",
                    "SET FOREIGN_KEY_CHECKS = 1",
                    "UPDATE AppSettings SET NewsFeedSubscribed = 0"
                ]

            for query in queries:
                # SET and UPDATE statements here take no parameters
                if query.startswith('SET'):
                    cursor.execute(query)
                elif query.startswith('UPDATE'):
                    cursor.execute(query)
                else:
                    cursor.execute(query, (podcast_id,))

        else:
            # Normal podcast deletion process
            if database_type == "postgresql":
                delete_queries = [
                    # DELETE FROM PLAYLIST CONTENTS - Add this first!
                    ('DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
                    ('DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
                    ('DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
                    ('DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
                    ('DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
                    ('DELETE FROM "Episodes" WHERE PodcastID = %s', (podcast_id,)),
                    ('DELETE FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,))
                ]
            else:  # MySQL or MariaDB
                delete_queries = [
                    # DELETE FROM PLAYLIST CONTENTS - Add this first!
                    ("DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
                    ("DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
                    ("DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
                    ("DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
                    ("DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
                    ("DELETE FROM Episodes WHERE PodcastID = %s", (podcast_id,)),
                    ("DELETE FROM Podcasts WHERE PodcastID = %s", (podcast_id,))
                ]

            for query, params in delete_queries:
                cursor.execute(query, params)

        # Update UserStats table to decrement PodcastsAdded count
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE UserStats SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s"

        cursor.execute(query, (user_id,))
        cnx.commit()

    except (psycopg.Error, mysql.connector.Error) as err:
        print(f"Database Error: {err}")
        cnx.rollback()
        raise
    except Exception as e:
        print(f"General Error in remove_podcast: {e}")
        cnx.rollback()
        raise
    finally:
        cursor.close()


def remove_podcast_id(cnx, database_type, podcast_id, user_id):
    """Remove a podcast by its ID, cascading through all related tables.

    NOTE(review): unlike remove_podcast(), DB errors here are printed and
    rolled back but NOT re-raised, and PodcastsAdded is decremented without
    the GREATEST(...) floor — confirm whether that asymmetry is intentional.
    """
    cursor = cnx.cursor()

    try:
        # If there's no podcast ID found, raise an error or exit the function early
        if podcast_id is None:
            raise ValueError("No podcast found with ID {}".format(podcast_id))

        # Delete user episode history entries associated with the podcast
        if database_type == "postgresql":
            # DELETE FROM PLAYLIST CONTENTS - Add this first!
            delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
            delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
            delete_downloaded = 'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
            delete_saved = 'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
            delete_queue = 'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
            delete_episodes = 'DELETE FROM "Episodes" WHERE PodcastID = %s'
            delete_podcast = 'DELETE FROM "Podcasts" WHERE PodcastID = %s'
            update_user_stats = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s'
        else:  # MySQL or MariaDB
            # DELETE FROM PLAYLIST CONTENTS - Add this first!
            delete_playlist_contents = "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
            delete_history = "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
            delete_downloaded = "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
            delete_saved = "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
            delete_queue = "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
            delete_episodes = "DELETE FROM Episodes WHERE PodcastID = %s"
            delete_podcast = "DELETE FROM Podcasts WHERE PodcastID = %s"
            update_user_stats = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s"

        # Execute the deletion statements in order
        cursor.execute(delete_playlist_contents, (podcast_id,))
        cursor.execute(delete_history, (podcast_id,))
        cursor.execute(delete_downloaded, (podcast_id,))
        cursor.execute(delete_saved, (podcast_id,))
        cursor.execute(delete_queue, (podcast_id,))
        cursor.execute(delete_episodes, (podcast_id,))
        cursor.execute(delete_podcast, (podcast_id,))
        cursor.execute(update_user_stats, (user_id,))

        cnx.commit()
    except (psycopg.Error, mysql.connector.Error) as err:
        print("Error: {}".format(err))
        cnx.rollback()
    finally:
        cursor.close()

def remove_youtube_channel(cnx, database_type, podcast_id, user_id):
    """Remove a YouTube channel by PodcastID: files, DB references, stats.

    Deletes each video's downloaded MP3, then removes rows from every table
    that can reference the channel's videos before removing the videos and
    the podcast row itself, and decrements the user's PodcastsAdded stat.
    DB errors are printed and rolled back (not re-raised), mirroring
    remove_podcast_id().
    """
    cursor = cnx.cursor()
    try:
        # First, get all video IDs for the podcast so we can delete the files
        if database_type == "postgresql":
            get_video_ids_query = 'SELECT YouTubeVideoID FROM "YouTubeVideos" WHERE PodcastID = %s'
        else:  # MySQL or MariaDB
            get_video_ids_query = "SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = %s"

        cursor.execute(get_video_ids_query, (podcast_id,))
        video_ids = cursor.fetchall()

        # Delete the MP3 files for each video
        for video_id in video_ids:
            if isinstance(video_id, tuple):
                video_id_str = video_id[0]
            else:  # dict
                video_id_str = video_id["youtubevideoid"]

            # Delete the MP3 file
            file_paths = [
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3",
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3.mp3"  # In case of double extension
            ]

            for file_path in file_paths:
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        print(f"Deleted file: {file_path}")
                    except Exception as e:
                        print(f"Failed to delete file {file_path}: {str(e)}")

        # Delete from the related tables
        if database_type == "postgresql":
            delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_video_history = 'DELETE FROM "UserVideoHistory" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_downloaded = 'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_downloaded_videos = 'DELETE FROM "DownloadedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_saved_videos = 'DELETE FROM "SavedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_saved = 'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_queue = 'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
            delete_videos = 'DELETE FROM "YouTubeVideos" WHERE PodcastID = %s'
            delete_podcast = 'DELETE FROM "Podcasts" WHERE PodcastID = %s AND IsYouTubeChannel = TRUE'
            update_user_stats = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s'
        else:  # MySQL or MariaDB
            delete_playlist_contents = "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_history = "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_video_history = "DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_downloaded = "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_downloaded_videos = "DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_saved_videos = "DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_saved = "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_queue = "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
            delete_videos = "DELETE FROM YouTubeVideos WHERE PodcastID = %s"
            delete_podcast = "DELETE FROM Podcasts WHERE PodcastID = %s AND IsYouTubeChannel = TRUE"
            update_user_stats = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s"

        # Execute the deletion statements in order
        cursor.execute(delete_playlist_contents, (podcast_id,))
        cursor.execute(delete_history, (podcast_id,))
        cursor.execute(delete_video_history, (podcast_id,))
        cursor.execute(delete_downloaded, (podcast_id,))
        cursor.execute(delete_downloaded_videos, (podcast_id,))
        cursor.execute(delete_saved_videos, (podcast_id,))
        cursor.execute(delete_saved, (podcast_id,))
        cursor.execute(delete_queue, (podcast_id,))
        cursor.execute(delete_videos, (podcast_id,))
        cursor.execute(delete_podcast, (podcast_id,))
        cursor.execute(update_user_stats, (user_id,))

        cnx.commit()
    except (psycopg.Error, mysql.connector.Error) as err:
        print("Error: {}".format(err))
        cnx.rollback()
    finally:
        cursor.close()

def return_episodes(database_type, cnx, user_id):
    """Return the user's episodes and YouTube videos from the last 30 days.

    Unions regular podcast episodes with YouTube videos, annotating each row
    with saved/queued/downloaded flags, listen duration, and an is_youtube
    marker, ordered newest-first.

    Returns:
        list of dicts (lowercase keys; boolean flags coerced to bool on
        MySQL/MariaDB), or [] when nothing matches.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = """
            SELECT * FROM (
                SELECT
                    "Podcasts".PodcastName as podcastname,
                    "Episodes".EpisodeTitle as episodetitle,
                    "Episodes".EpisodePubDate as episodepubdate,
                    "Episodes".EpisodeDescription as episodedescription,
                    "Episodes".EpisodeArtwork as episodeartwork,
                    "Episodes".EpisodeURL as episodeurl,
                    "Episodes".EpisodeDuration as episodeduration,
                    "UserEpisodeHistory".ListenDuration as listenduration,
                    "Episodes".EpisodeID as episodeid,
                    "Episodes".Completed as completed,
                    CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
                    CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    FALSE as is_youtube
                FROM "Episodes"
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "UserEpisodeHistory".UserID = %s
                LEFT JOIN "SavedEpisodes" ON
                    "Episodes".EpisodeID = "SavedEpisodes".EpisodeID
                    AND "SavedEpisodes".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "Episodes".EpisodeID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                LEFT JOIN "DownloadedEpisodes" ON
                    "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID
                    AND "DownloadedEpisodes".UserID = %s
                WHERE "Episodes".EpisodePubDate >= NOW() - INTERVAL '30 days'
                AND "Podcasts".UserID = %s

                UNION ALL

                SELECT
                    "Podcasts".PodcastName as podcastname,
                    "YouTubeVideos".VideoTitle as episodetitle,
                    "YouTubeVideos".PublishedAt as episodepubdate,
                    "YouTubeVideos".VideoDescription as episodedescription,
                    "YouTubeVideos".ThumbnailURL as episodeartwork,
                    "YouTubeVideos".VideoURL as episodeurl,
                    "YouTubeVideos".Duration as episodeduration,
                    "YouTubeVideos".ListenPosition as listenduration,
                    "YouTubeVideos".VideoID as episodeid,
                    "YouTubeVideos".Completed as completed,
                    CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued,
                    CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    TRUE as is_youtube
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "SavedVideos" ON
                    "YouTubeVideos".VideoID = "SavedVideos".VideoID
                    AND "SavedVideos".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "YouTubeVideos".VideoID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = TRUE
                LEFT JOIN "DownloadedVideos" ON
                    "YouTubeVideos".VideoID = "DownloadedVideos".VideoID
                    AND "DownloadedVideos".UserID = %s
                WHERE "YouTubeVideos".PublishedAt >= NOW() - INTERVAL '30 days'
                AND "Podcasts".UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = """
            SELECT * FROM (
                SELECT
                    Podcasts.PodcastName as podcastname,
                    Episodes.EpisodeTitle as episodetitle,
                    Episodes.EpisodePubDate as episodepubdate,
                    Episodes.EpisodeDescription as episodedescription,
                    Episodes.EpisodeArtwork as episodeartwork,
                    Episodes.EpisodeURL as episodeurl,
                    Episodes.EpisodeDuration as episodeduration,
                    UserEpisodeHistory.ListenDuration as listenduration,
                    Episodes.EpisodeID as episodeid,
                    Episodes.Completed as completed,
                    CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
                    CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    FALSE as is_youtube
                FROM Episodes
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND UserEpisodeHistory.UserID = %s
                LEFT JOIN SavedEpisodes ON
                    Episodes.EpisodeID = SavedEpisodes.EpisodeID
                    AND SavedEpisodes.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    Episodes.EpisodeID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                LEFT JOIN DownloadedEpisodes ON
                    Episodes.EpisodeID = DownloadedEpisodes.EpisodeID
                    AND DownloadedEpisodes.UserID = %s
                WHERE Episodes.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 30 DAY)
                AND Podcasts.UserID = %s

                UNION ALL

                SELECT
                    Podcasts.PodcastName as podcastname,
                    YouTubeVideos.VideoTitle as episodetitle,
                    YouTubeVideos.PublishedAt as episodepubdate,
                    YouTubeVideos.VideoDescription as episodedescription,
                    YouTubeVideos.ThumbnailURL as episodeartwork,
                    YouTubeVideos.VideoURL as episodeurl,
                    YouTubeVideos.Duration as episodeduration,
                    YouTubeVideos.ListenPosition as listenduration,
                    YouTubeVideos.VideoID as episodeid,
                    YouTubeVideos.Completed as completed,
                    CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued,
                    CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
                    1 as is_youtube
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN SavedVideos ON
                    YouTubeVideos.VideoID = SavedVideos.VideoID
                    AND SavedVideos.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    YouTubeVideos.VideoID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 1
                LEFT JOIN DownloadedVideos ON
                    YouTubeVideos.VideoID = DownloadedVideos.VideoID
                    AND DownloadedVideos.UserID = %s
                WHERE YouTubeVideos.PublishedAt >= DATE_SUB(NOW(), INTERVAL 30 DAY)
                AND Podcasts.UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """

    # Execute with all params for both unions
    params = (user_id,) * 9  # user_id repeated 9 times for all the places needed
    cursor.execute(query, params)
    rows = cursor.fetchall()
    cursor.close()

    if not rows:
        return []

    if database_type != "postgresql":
        # Convert column names to lowercase for MySQL and ensure boolean fields are actual booleans
        bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
        rows = [{k.lower(): (bool(v) if k.lower() in bool_fields else v)
                 for k, v in row.items()} for row in rows]

    return rows
- for k, v in row.items()} for row in rows] - - return rows - - except Exception as e: - print(f"Error fetching person episodes: {e}") - return None - finally: - cursor.close() - -def return_podcast_episodes(database_type, cnx, user_id, podcast_id): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - else: # Assuming MariaDB/MySQL if not PostgreSQL - cursor = cnx.cursor(dictionary=True) - - if database_type == "postgresql": - query = ( - 'SELECT "Podcasts".PodcastID, "Podcasts".PodcastName, "Episodes".EpisodeID, ' - '"Episodes".EpisodeTitle, "Episodes".EpisodePubDate, "Episodes".EpisodeDescription, ' - '"Episodes".EpisodeArtwork, "Episodes".EpisodeURL, "Episodes".EpisodeDuration, ' - '"Episodes".Completed, ' - '"UserEpisodeHistory".ListenDuration, CAST("Episodes".EpisodeID AS VARCHAR) AS guid ' - 'FROM "Episodes" ' - 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID ' - 'LEFT JOIN "UserEpisodeHistory" ON "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID AND "UserEpisodeHistory".UserID = %s ' - 'WHERE "Podcasts".PodcastID = %s AND "Podcasts".UserID = %s ' - 'ORDER BY "Episodes".EpisodePubDate DESC' - ) - else: # MySQL or MariaDB - query = ( - "SELECT Podcasts.PodcastID, Podcasts.PodcastName, Episodes.EpisodeID, " - "Episodes.EpisodeTitle, Episodes.EpisodePubDate, Episodes.EpisodeDescription, " - "Episodes.EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration, " - "Episodes.Completed, " - "UserEpisodeHistory.ListenDuration, CAST(Episodes.EpisodeID AS CHAR) AS guid " - "FROM Episodes " - "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID " - "LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = %s " - "WHERE Podcasts.PodcastID = %s AND Podcasts.UserID = %s " - "ORDER BY Episodes.EpisodePubDate DESC" - ) - - cursor.execute(query, (user_id, podcast_id, user_id)) - rows = cursor.fetchall() - cursor.close() - - # Normalize keys - 
rows = capitalize_keys(rows) - - if database_type != "postgresql": - for row in rows: - row['Completed'] = bool(row['Completed']) - - return rows or None - -def return_youtube_episodes(database_type, cnx, user_id, podcast_id): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - else: # Assuming MariaDB/MySQL if not PostgreSQL - cursor = cnx.cursor(dictionary=True) - - if database_type == "postgresql": - query = ( - 'SELECT "Podcasts".PodcastID, "Podcasts".PodcastName, "YouTubeVideos".VideoID AS EpisodeID, ' - '"YouTubeVideos".VideoTitle AS EpisodeTitle, "YouTubeVideos".PublishedAt AS EpisodePubDate, ' - '"YouTubeVideos".VideoDescription AS EpisodeDescription, ' - '"YouTubeVideos".ThumbnailURL AS EpisodeArtwork, "YouTubeVideos".VideoURL AS EpisodeURL, ' - '"YouTubeVideos".Duration AS EpisodeDuration, ' - '"YouTubeVideos".ListenPosition AS ListenDuration, ' - '"YouTubeVideos".YouTubeVideoID AS guid ' - 'FROM "YouTubeVideos" ' - 'INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID ' - 'WHERE "Podcasts".PodcastID = %s AND "Podcasts".UserID = %s ' - 'ORDER BY "YouTubeVideos".PublishedAt DESC' - ) - else: # MySQL or MariaDB - query = ( - "SELECT Podcasts.PodcastID, Podcasts.PodcastName, YouTubeVideos.VideoID AS EpisodeID, " - "YouTubeVideos.VideoTitle AS EpisodeTitle, YouTubeVideos.PublishedAt AS EpisodePubDate, " - "YouTubeVideos.VideoDescription AS EpisodeDescription, " - "YouTubeVideos.ThumbnailURL AS EpisodeArtwork, YouTubeVideos.VideoURL AS EpisodeURL, " - "YouTubeVideos.Duration AS EpisodeDuration, " - "YouTubeVideos.ListenPosition AS ListenDuration, " - "YouTubeVideos.YouTubeVideoID AS guid " - "FROM YouTubeVideos " - "INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID " - "WHERE Podcasts.PodcastID = %s AND Podcasts.UserID = %s " - "ORDER BY YouTubeVideos.PublishedAt DESC" - ) - - cursor.execute(query, (podcast_id, user_id)) - rows = cursor.fetchall() - cursor.close() - - # Normalize 
keys - rows = capitalize_keys(rows) - return rows or None - -def get_podcast_details(database_type, cnx, user_id, podcast_id): - if isinstance(podcast_id, tuple): - pod_id, episode_id = podcast_id - else: - pod_id = podcast_id - episode_id = None - - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - else: - cursor = cnx.cursor(dictionary=True) - - if database_type == "postgresql": - query = """ - SELECT * - FROM "Podcasts" - WHERE PodcastID = %s AND UserID = %s - """ - else: - query = """ - SELECT * - FROM Podcasts - WHERE PodcastID = %s AND UserID = %s - """ - - cursor.execute(query, (pod_id, user_id)) - details = cursor.fetchone() - - if not details: - cursor.execute(query, (pod_id, 1)) - details = cursor.fetchone() - - if details: - lower_row = lowercase_keys(details) - - # Only get count from YouTubeVideos if this is a YouTube channel - if lower_row.get('isyoutubechannel', False): - if database_type == "postgresql": - count_query = """ - SELECT COUNT(*) as count - FROM "YouTubeVideos" - WHERE PodcastID = %s - """ - else: - count_query = """ - SELECT COUNT(*) as count - FROM YouTubeVideos - WHERE PodcastID = %s - """ - - cursor.execute(count_query, (pod_id,)) - count_result = cursor.fetchone() - episode_count = count_result['count'] if isinstance(count_result, dict) else count_result[0] - lower_row['episodecount'] = episode_count - - if database_type != "postgresql": - lower_row['explicit'] = bool(lower_row.get('explicit', 0)) - lower_row['isyoutubechannel'] = bool(lower_row.get('isyoutubechannel', 0)) - # You might also want to handle autodownload if it's used in the frontend - lower_row['autodownload'] = bool(lower_row.get('autodownload', 0)) - - bool_fix = convert_bools(lower_row, database_type) - cursor.close() - return bool_fix - - cursor.close() - return None - - -def get_podcast_id(database_type, cnx, user_id, podcast_feed, podcast_name): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = 
cnx.cursor() - else: # Assuming MariaDB/MySQL if not PostgreSQL - cursor = cnx.cursor(dictionary=True) - - if database_type == "postgresql": - query = ( - 'SELECT PodcastID ' - 'FROM "Podcasts" ' - 'WHERE FeedURL = %s AND PodcastName = %s AND UserID = %s' - ) - else: # MySQL or MariaDB - query = ( - "SELECT PodcastID " - "FROM Podcasts " - "WHERE FeedURL = %s AND PodcastName = %s AND UserID = %s" - ) - - cursor.execute(query, (podcast_feed, podcast_name, user_id)) - row = cursor.fetchone() # Fetching only one row as we expect a single result - - cursor.close() - - if not row: - return None - - if database_type == "postgresql": - return row['podcastid'] # Assuming the column name is 'PodcastID' - else: - return row['PodcastID'] # Assuming the column name is 'PodcastID' - -def get_location_value(result, key, default=None): - """ - Helper function to extract value from result set. - It handles both dictionaries and tuples. - """ - key_lower = key.lower() - if isinstance(result, dict): - return result.get(key_lower, default) - elif isinstance(result, tuple): - # Define a mapping of field names to their tuple indices for your specific queries - key_map = { - "downloadid": 0, - "downloadedlocation": 1 - } - index = key_map.get(key_lower) - return result[index] if index is not None else default - return default - -def delete_episode(database_type, cnx, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - if is_youtube: - # Get the download ID from the DownloadedVideos table - if database_type == "postgresql": - query = ( - 'SELECT DownloadID, DownloadedLocation ' - 'FROM "DownloadedVideos" ' - 'INNER JOIN "YouTubeVideos" ON "DownloadedVideos".VideoID = "YouTubeVideos".VideoID ' - 'INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID ' - 'WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s' - ) - else: - query = ( - "SELECT DownloadID, DownloadedLocation " - "FROM DownloadedVideos " - "INNER JOIN YouTubeVideos ON 
DownloadedVideos.VideoID = YouTubeVideos.VideoID " - "INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID " - "WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s" - ) - else: - # Original podcast episode query - if database_type == "postgresql": - query = ( - 'SELECT DownloadID, DownloadedLocation ' - 'FROM "DownloadedEpisodes" ' - 'INNER JOIN "Episodes" ON "DownloadedEpisodes".EpisodeID = "Episodes".EpisodeID ' - 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID ' - 'WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s' - ) - else: - query = ( - "SELECT DownloadID, DownloadedLocation " - "FROM DownloadedEpisodes " - "INNER JOIN Episodes ON DownloadedEpisodes.EpisodeID = Episodes.EpisodeID " - "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID " - "WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s" - ) - - cursor.execute(query, (episode_id, user_id)) - result = cursor.fetchone() - logging.debug(f"Query result: {result}") - - if not result: - logging.warning("No matching download found.") - cursor.close() - return - - download_id = get_location_value(result, "DownloadID") - downloaded_location = get_location_value(result, "DownloadedLocation") - logging.debug(f"DownloadID: {download_id}, DownloadedLocation: {downloaded_location}") - - # Delete the downloaded file (but not source YouTube file) - if downloaded_location and os.path.exists(downloaded_location): - if is_youtube: - # Only delete if it's not in the YouTube source directory - if not downloaded_location.startswith("/opt/pinepods/downloads/youtube/"): - os.remove(downloaded_location) - else: - os.remove(downloaded_location) - else: - logging.warning(f"Downloaded file not found: {downloaded_location}") - - # Remove the entry from the appropriate downloads table - if is_youtube: - if database_type == "postgresql": - query = 'DELETE FROM "DownloadedVideos" WHERE DownloadID = %s' - else: - query = "DELETE FROM DownloadedVideos WHERE DownloadID = 
%s" - else: - if database_type == "postgresql": - query = 'DELETE FROM "DownloadedEpisodes" WHERE DownloadID = %s' - else: - query = "DELETE FROM DownloadedEpisodes WHERE DownloadID = %s" - - cursor.execute(query, (download_id,)) - cnx.commit() - logging.info(f"Removed {cursor.rowcount} entry from the downloads table.") - - # Update UserStats table - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s' - else: - query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - - except Exception as e: - logging.error(f"Error during episode deletion: {e}") - cnx.rollback() - finally: - cursor.close() - -def return_pods(database_type, cnx, user_id): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - else: - cursor = cnx.cursor(dictionary=True) - - # Base query remains the same but handles nulls and empty strings with NULLIF - if database_type == "postgresql": - query = """ - SELECT - p.PodcastID, - COALESCE(NULLIF(p.PodcastName, ''), 'Unknown Podcast') as PodcastName, - COALESCE(NULLIF(p.ArtworkURL, ''), '/static/assets/default-podcast.png') as ArtworkURL, - COALESCE(NULLIF(p.Description, ''), 'No description available') as Description, - COALESCE(p.EpisodeCount, 0) as EpisodeCount, - COALESCE(NULLIF(p.WebsiteURL, ''), '') as WebsiteURL, - COALESCE(NULLIF(p.FeedURL, ''), '') as FeedURL, - COALESCE(NULLIF(p.Author, ''), 'Unknown Author') as Author, - COALESCE(NULLIF(p.Categories, ''), '') as Categories, - COALESCE(p.Explicit, FALSE) as Explicit, - COALESCE(p.PodcastIndexID, 0) as PodcastIndexID, - COUNT(DISTINCT h.UserEpisodeHistoryID) as play_count, - MIN(e.EpisodePubDate) as oldest_episode_date, - COALESCE( - (SELECT COUNT(DISTINCT ueh.EpisodeID) - FROM "UserEpisodeHistory" ueh - JOIN "Episodes" ep ON ueh.EpisodeID = ep.EpisodeID - WHERE ep.PodcastID = p.PodcastID - AND 
ueh.UserID = %s), - 0 - ) as episodes_played - FROM "Podcasts" p - LEFT JOIN "Episodes" e ON p.PodcastID = e.PodcastID - LEFT JOIN "UserEpisodeHistory" h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s - WHERE p.UserID = %s - GROUP BY p.PodcastID - """ - else: # MySQL/MariaDB version - query = """ - SELECT - p.PodcastID, - COALESCE(NULLIF(p.PodcastName, ''), 'Unknown Podcast') as PodcastName, - COALESCE(NULLIF(p.ArtworkURL, ''), '/static/assets/default-podcast.png') as ArtworkURL, - COALESCE(NULLIF(p.Description, ''), 'No description available') as Description, - COALESCE(p.EpisodeCount, 0) as EpisodeCount, - COALESCE(NULLIF(p.WebsiteURL, ''), '') as WebsiteURL, - COALESCE(NULLIF(p.FeedURL, ''), '') as FeedURL, - COALESCE(NULLIF(p.Author, ''), 'Unknown Author') as Author, - COALESCE(NULLIF(p.Categories, ''), '') as Categories, - COALESCE(p.Explicit, FALSE) as Explicit, - COALESCE(p.PodcastIndexID, 0) as PodcastIndexID, - COUNT(DISTINCT h.UserEpisodeHistoryID) as play_count, - MIN(e.EpisodePubDate) as oldest_episode_date, - COALESCE( - (SELECT COUNT(DISTINCT ueh.EpisodeID) - FROM UserEpisodeHistory ueh - JOIN Episodes ep ON ueh.EpisodeID = ep.EpisodeID - WHERE ep.PodcastID = p.PodcastID - AND ueh.UserID = %s), - 0 - ) as episodes_played - FROM Podcasts p - LEFT JOIN Episodes e ON p.PodcastID = e.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s - WHERE p.UserID = %s - GROUP BY p.PodcastID - """ - - try: - cursor.execute(query, (user_id, user_id, user_id)) - rows = cursor.fetchall() - except Exception as e: - logging.error(f"Database error in return_pods: {str(e)}") - return [] - finally: - cursor.close() - - if not rows: - return [] - - # Process all rows, regardless of database type - processed_rows = [] - for row in rows: - # Convert to lowercase keys for consistency - processed_row = {k.lower(): v for k, v in row.items()} - - # Define default values - defaults = { - 'podcastname': 'Unknown Podcast', - 'artworkurl': 
'/static/assets/logo_random/11.jpeg', - 'description': 'No description available', - 'episodecount': 0, - 'websiteurl': '', - 'feedurl': '', - 'author': 'Unknown Author', - 'categories': '', - 'explicit': False, - 'podcastindexid': 0, - 'play_count': 0, - 'episodes_played': 0 - } - - # Apply defaults for any missing or null values - for key, default_value in defaults.items(): - if key not in processed_row or processed_row[key] is None or processed_row[key] == "": - processed_row[key] = default_value - - processed_rows.append(processed_row) - - return processed_rows - -def check_self_service(cnx, database_type): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT SelfServiceUser FROM "AppSettings"' - else: # MySQL or MariaDB - query = "SELECT SelfServiceUser FROM AppSettings" - cursor.execute(query) - result = cursor.fetchone() - cursor.close() - - if database_type == "postgresql": - print(f'debug result: {result}') - logging.debug(f'debug result: {result}') - self_service = result['selfserviceuser'] if isinstance(result, dict) else result[0] - else: # MySQL or MariaDB - self_service = result[0] - - if self_service == 1: - return True - elif self_service == 0: - return False - else: - return None - -def refresh_pods_for_user(cnx, database_type, podcast_id): - print(f'Refresh begin for podcast {podcast_id}') - cursor = cnx.cursor() - if database_type == "postgresql": - select_podcast = ''' - SELECT "podcastid", "feedurl", "artworkurl", "autodownload", "username", "password", - "isyoutubechannel", COALESCE("feedurl", '') as channel_id, "feedcutoffdays" - FROM "Podcasts" - WHERE "podcastid" = %s - ''' - else: # MySQL or MariaDB - select_podcast = ''' - SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password, - IsYouTubeChannel, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays - FROM Podcasts - WHERE PodcastID = %s - ''' - cursor.execute(select_podcast, (podcast_id,)) - result = cursor.fetchone() - new_episodes = [] - - if 
result: - if isinstance(result, dict): - if database_type == "postgresql": - # PostgreSQL - lowercase keys - podcast_id = result['podcastid'] - feed_url = result['feedurl'] - artwork_url = result['artworkurl'] - auto_download = result['autodownload'] - username = result['username'] - password = result['password'] - is_youtube = result['isyoutubechannel'] - channel_id = result['channel_id'] - feed_cutoff = result['feedcutoffdays'] - else: - # MariaDB - uppercase keys - podcast_id = result['PodcastID'] - feed_url = result['FeedURL'] - artwork_url = result['ArtworkURL'] - auto_download = result['AutoDownload'] - username = result['Username'] - password = result['Password'] - is_youtube = result['IsYouTubeChannel'] - channel_id = result['channel_id'] - feed_cutoff = result['FeedCutoffDays'] - else: - podcast_id, feed_url, artwork_url, auto_download, username, password, is_youtube, channel_id, feed_cutoff = result - - print(f'Processing podcast: {podcast_id}') - if is_youtube: - channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url - channel_id = channel_id.split('/')[0].split('?')[0] - youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff) - else: - episodes = add_episodes(cnx, database_type, podcast_id, feed_url, - artwork_url, auto_download, - username=username, password=password, websocket=True) - new_episodes.extend(episodes) - - cursor.close() - return new_episodes - - -def refresh_pods(cnx, database_type): - print('refresh begin') - cursor = cnx.cursor() - if database_type == "postgresql": - select_podcasts = ''' - SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password, - IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays - FROM "Podcasts" - ''' - else: - select_podcasts = ''' - SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password, - IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays - FROM Podcasts - ''' - 
cursor.execute(select_podcasts) - result_set = cursor.fetchall() - for result in result_set: - podcast_id = None - try: - if isinstance(result, tuple): - podcast_id, feed_url, artwork_url, auto_download, username, password, is_youtube, user_id, channel_id, feed_cutoff = result - elif isinstance(result, dict): - if database_type == "postgresql": - podcast_id = result["podcastid"] - feed_url = result["feedurl"] - artwork_url = result["artworkurl"] - auto_download = result["autodownload"] - username = result["username"] - password = result["password"] - is_youtube = result["isyoutubechannel"] - user_id = result["userid"] - channel_id = result["channel_id"] - feed_cutoff = result["feedcutoffdays"] - else: - podcast_id = result["PodcastID"] - feed_url = result["FeedURL"] - artwork_url = result["ArtworkURL"] - auto_download = result["AutoDownload"] - username = result["Username"] - password = result["Password"] - is_youtube = result["IsYouTubeChannel"] - user_id = result["UserID"] - channel_id = result["channel_id"] - feed_cutoff = result["FeedCutoffDays"] - else: - raise ValueError(f"Unexpected result type: {type(result)}") - print(f'Running for: {podcast_id}') - if is_youtube: - # Extract channel ID from feed URL - channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url - # Clean up any trailing slashes or query parameters - channel_id = channel_id.split('/')[0].split('?')[0] - youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff) - else: - add_episodes(cnx, database_type, podcast_id, feed_url, artwork_url, - auto_download, username=username, password=password, websocket=False) - except Exception as e: - print(f"Error refreshing podcast {podcast_id}: {str(e)}") - continue - cursor.close() - - - -def remove_unavailable_episodes(cnx, database_type): - cursor = cnx.cursor() - - # select all episodes - # select all episodes - if database_type == "postgresql": - select_episodes = 'SELECT EpisodeID, PodcastID, 
EpisodeTitle, EpisodeURL, EpisodePubDate FROM "Episodes"' - else: # MySQL or MariaDB - select_episodes = "SELECT EpisodeID, PodcastID, EpisodeTitle, EpisodeURL, EpisodePubDate FROM Episodes" - cursor.execute(select_episodes) - episodes = cursor.fetchall() - - # iterate through all episodes - for episode in episodes: - episode_id, podcast_id, episode_title, episode_url, published_date = episode - - try: - # check if episode URL is still valid - response = requests.head(episode_url) - if response.status_code == 404: - # remove episode from database - if database_type == "postgresql": - delete_episode = 'DELETE FROM "Episodes" WHERE "EpisodeID"=%s' - else: # MySQL or MariaDB - delete_episode = "DELETE FROM Episodes WHERE EpisodeID=%s" - cursor.execute(delete_episode, (episode_id,)) - cnx.commit() - - except Exception as e: - print(f"Error checking episode {episode_id}: {e}") - - cursor.close() - # cnx.close() - - -def get_podcast_id_by_title(cnx, database_type, podcast_title): - cursor = cnx.cursor() - - # get the podcast ID for the specified title - # get the podcast ID for the specified title - if database_type == "postgresql": - cursor.execute('SELECT PodcastID FROM "Podcasts" WHERE Title = %s', (podcast_title,)) - else: # MySQL or MariaDB - cursor.execute("SELECT PodcastID FROM Podcasts WHERE Title = %s", (podcast_title,)) - - result = cursor.fetchone() - - if result: - return result[0] - else: - return None - - cursor.close() - # cnx.close() - # - -def get_podcast_feed_by_id(cnx, database_type, podcast_id): - cursor = cnx.cursor() - # get the podcast ID for the specified title - if database_type == "postgresql": - cursor.execute('SELECT FeedURL FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,)) - else: # MySQL or MariaDB - cursor.execute("SELECT FeedURL FROM Podcasts WHERE PodcastID = %s", (podcast_id,)) - result = cursor.fetchone() - if result: - # Handle different return types - if isinstance(result, dict): - return result.get('feedurl') # Use lowercase as 
it seems your keys are lowercase - elif isinstance(result, tuple) or isinstance(result, list): - return result[0] - else: - # For any other type, try to return it directly - return result - else: - return None - # Note: The below code will never execute because of the return statements above - cursor.close() - # cnx.close() - -def refresh_podcast_by_title(cnx, database_type, podcast_title): - # get the podcast ID for the specified title - podcast_id = get_podcast_id_by_title(cnx, database_type, podcast_title) - - if podcast_id is not None: - # refresh the podcast with the specified ID - refresh_single_pod(cnx, database_type, podcast_id) - else: - print("Error: Could not find podcast with title {}".format(podcast_title)) - - -def refresh_single_pod(cnx, database_type, podcast_id): - cursor = cnx.cursor() - - # get the feed URL and artwork URL for the specified podcast - if database_type == "postgresql": - cursor.execute('SELECT FeedURL, ArtworkURL FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,)) - else: # MySQL or MariaDB - cursor.execute("SELECT FeedURL, ArtworkURL FROM Podcasts WHERE PodcastID = %s", (podcast_id,)) - feed_url, artwork_url = cursor.fetchone() - - # parse the podcast feed - episode_dump = feedparser.parse(feed_url) - - # get the list of episode titles already in the database - if database_type == "postgresql": - cursor.execute('SELECT EpisodeTitle FROM "Episodes" WHERE PodcastID = %s', (podcast_id,)) - else: # MySQL or MariaDB - cursor.execute("SELECT EpisodeTitle FROM Episodes WHERE PodcastID = %s", (podcast_id,)) - existing_titles = set(row[0] for row in cursor.fetchall()) - - # insert any new episodes into the database - for entry in episode_dump.entries: - if hasattr(entry, "title") and hasattr(entry, "summary") and hasattr(entry, "enclosures"): - title = entry.title - - # skip episodes that are already in the database - if title in existing_titles: - continue - - description = entry.summary - audio_url = entry.enclosures[0].href if 
entry.enclosures else "" - release_date = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d") - - # get the URL of the episode artwork, or use the podcast image URL if not available - artwork_url = entry.get('itunes_image', {}).get('href', None) or entry.get('image', {}).get('href', - None) or artwork_url - - # insert the episode into the database - if database_type == "postgresql": - add_episode = ('INSERT INTO "Episodes" ' - '(PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) ' - 'VALUES (%s, %s, %s, %s, %s, %s, %s)') - else: # MySQL or MariaDB - add_episode = ("INSERT INTO Episodes " - "(PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) " - "VALUES (%s, %s, %s, %s, %s, %s, %s)") - episode_values = (podcast_id, title, description, audio_url, artwork_url, release_date, 0) - cursor.execute(add_episode, episode_values) - - cnx.commit() - - cursor.close() - # cnx.close() - - -def get_hist_value(result, key, default=None): - """ - Helper function to extract value from result set. - It handles both dictionaries and tuples. 
- """ - if isinstance(result, dict): - return result.get(key, default) - elif isinstance(result, tuple): - key_map = { - "UserEpisodeHistoryID": 0, - } - index = key_map.get(key) - return result[index] if index is not None else default - return default - -def record_podcast_history(cnx, database_type, episode_id, user_id, episode_pos, is_youtube=False): - from datetime import datetime - cursor = cnx.cursor() - now = datetime.now() - new_listen_duration = round(episode_pos) - - if is_youtube: - # Handle YouTube video history - if database_type == "postgresql": - check_history = 'SELECT UserVideoHistoryID FROM "UserVideoHistory" WHERE VideoID = %s AND UserID = %s' - else: - check_history = "SELECT UserVideoHistoryID FROM UserVideoHistory WHERE VideoID = %s AND UserID = %s" - - cursor.execute(check_history, (episode_id, user_id)) - result = cursor.fetchone() - - if result is not None: - # Update existing video history - history_id = get_hist_value(result, "UserVideoHistoryID") - if history_id is not None: - if database_type == "postgresql": - update_history = 'UPDATE "UserVideoHistory" SET ListenDuration = %s, ListenDate = %s WHERE UserVideoHistoryID = %s' - else: - update_history = "UPDATE UserVideoHistory SET ListenDuration = %s, ListenDate = %s WHERE UserVideoHistoryID = %s" - cursor.execute(update_history, (new_listen_duration, now, history_id)) - else: - # Add new video history record - if database_type == "postgresql": - add_history = 'INSERT INTO "UserVideoHistory" (VideoID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)' - else: - add_history = "INSERT INTO UserVideoHistory (VideoID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)" - cursor.execute(add_history, (episode_id, user_id, new_listen_duration, now)) - else: - # Handle regular podcast episode history (existing logic) - if database_type == "postgresql": - check_history = 'SELECT UserEpisodeHistoryID FROM "UserEpisodeHistory" WHERE EpisodeID = %s AND UserID = %s' - else: - 
check_history = "SELECT UserEpisodeHistoryID FROM UserEpisodeHistory WHERE EpisodeID = %s AND UserID = %s" - - cursor.execute(check_history, (episode_id, user_id)) - result = cursor.fetchone() - - if result is not None: - history_id = get_hist_value(result, "UserEpisodeHistoryID") - if history_id is not None: - if database_type == "postgresql": - update_history = 'UPDATE "UserEpisodeHistory" SET ListenDuration = %s, ListenDate = %s WHERE UserEpisodeHistoryID = %s' - else: - update_history = "UPDATE UserEpisodeHistory SET ListenDuration = %s, ListenDate = %s WHERE UserEpisodeHistoryID = %s" - cursor.execute(update_history, (new_listen_duration, now, history_id)) - else: - if database_type == "postgresql": - add_history = 'INSERT INTO "UserEpisodeHistory" (EpisodeID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)' - else: - add_history = "INSERT INTO UserEpisodeHistory (EpisodeID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)" - cursor.execute(add_history, (episode_id, user_id, new_listen_duration, now)) - - cnx.commit() - cursor.close() - - -def get_user_id(cnx, database_type, username): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT UserID FROM "Users" WHERE Username = %s' - else: - query = "SELECT UserID FROM Users WHERE Username = %s" - cursor.execute(query, (username,)) - result = cursor.fetchone() - cursor.close() - # cnx.close() - - if result: - return result[0] - else: - return 1 - -def get_existing_youtube_videos(cnx, database_type, podcast_id): - """Get list of existing YouTube video URLs for a podcast""" - cursor = cnx.cursor() - if database_type == "postgresql": - query = ''' - SELECT VideoURL FROM "YouTubeVideos" - WHERE PodcastID = %s - ''' - else: - query = ''' - SELECT VideoURL FROM YouTubeVideos - WHERE PodcastID = %s - ''' - - cursor.execute(query, (podcast_id,)) - results = cursor.fetchall() - cursor.close() - - existing_urls = set() - if results: - for result in results: - if 
isinstance(result, dict): - url = result.get("videourl") - elif isinstance(result, tuple): - url = result[0] - if url: - existing_urls.add(url) - - return existing_urls - -def get_user_id_from_pod_id(cnx, database_type, podcast_id): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT UserID FROM "Podcasts" WHERE PodcastID = %s' - else: - query = "SELECT UserID FROM Podcasts WHERE PodcastID = %s" - - cursor.execute(query, (podcast_id,)) - result = cursor.fetchone() - - if result: - # Check if the result is a dictionary or tuple - if isinstance(result, dict): - user_id = result.get("userid") - elif isinstance(result, tuple): - user_id = result[0] - else: - user_id = None - else: - user_id = None - - cursor.close() - return user_id - - -def get_user_details(cnx, database_type, username): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT * FROM "Users" WHERE Username = %s' - else: - query = "SELECT * FROM Users WHERE Username = %s" - cursor.execute(query, (username,)) - result = cursor.fetchone() - cursor.close() - # cnx.close() - - if result: - return { - 'UserID': result[0], - 'Fullname': result[1], - 'Username': result[2], - 'Email': result[3], - 'Hashed_PW': result[4] - } - else: - return None - - -def get_user_details_id(cnx, database_type, user_id): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT * FROM "Users" WHERE UserID = %s' - else: - query = "SELECT * FROM Users WHERE UserID = %s" - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - # cnx.close() - - if result: - if isinstance(result, dict): - return { - 'UserID': result['userid'], - 'Fullname': result['fullname'], - 'Username': result['username'], - 'Email': result['email'], - 'Hashed_PW': result['hashed_pw'] - } - elif isinstance(result, tuple): - return { - 'UserID': result[0], - 'Fullname': result[1], - 'Username': result[2], - 'Email': result[3], - 'Hashed_PW': result[4] - } - else: - 
return None - - -def user_history(cnx, database_type, user_id): - if not cnx: - logging.error("Database connection is None.") - return [] - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT * FROM ( - SELECT - "Episodes".EpisodeID as episodeid, - "UserEpisodeHistory".ListenDate as listendate, - "UserEpisodeHistory".ListenDuration as listenduration, - "Episodes".EpisodeTitle as episodetitle, - "Episodes".EpisodeDescription as episodedescription, - "Episodes".EpisodeArtwork as episodeartwork, - "Episodes".EpisodeURL as episodeurl, - "Episodes".EpisodeDuration as episodeduration, - "Podcasts".PodcastName as podcastname, - "Episodes".EpisodePubDate as episodepubdate, - "Episodes".Completed as completed, - FALSE as is_youtube - FROM "UserEpisodeHistory" - JOIN "Episodes" ON "UserEpisodeHistory".EpisodeID = "Episodes".EpisodeID - JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - WHERE "UserEpisodeHistory".UserID = %s - - UNION ALL - - SELECT - "YouTubeVideos".VideoID as episodeid, - NULL as listendate, -- YouTube doesn't track listen date currently - "YouTubeVideos".ListenPosition as listenduration, - "YouTubeVideos".VideoTitle as episodetitle, - "YouTubeVideos".VideoDescription as episodedescription, - "YouTubeVideos".ThumbnailURL as episodeartwork, - "YouTubeVideos".VideoURL as episodeurl, - "YouTubeVideos".Duration as episodeduration, - "Podcasts".PodcastName as podcastname, - "YouTubeVideos".PublishedAt as episodepubdate, - "YouTubeVideos".Completed as completed, - TRUE as is_youtube - FROM "YouTubeVideos" - JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - WHERE "YouTubeVideos".ListenPosition > 0 - AND "Podcasts".UserID = %s - ) combined - ORDER BY listendate DESC NULLS LAST - """ - else: # MySQL/MariaDB - cursor = cnx.cursor(dictionary=True) - query = """ - SELECT * FROM ( - SELECT - Episodes.EpisodeID as episodeid, - UserEpisodeHistory.ListenDate as listendate, - 
UserEpisodeHistory.ListenDuration as listenduration, - Episodes.EpisodeTitle as episodetitle, - Episodes.EpisodeDescription as episodedescription, - Episodes.EpisodeArtwork as episodeartwork, - Episodes.EpisodeURL as episodeurl, - Episodes.EpisodeDuration as episodeduration, - Podcasts.PodcastName as podcastname, - Episodes.EpisodePubDate as episodepubdate, - Episodes.Completed as completed, - FALSE as is_youtube - FROM UserEpisodeHistory - JOIN Episodes ON UserEpisodeHistory.EpisodeID = Episodes.EpisodeID - JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - WHERE UserEpisodeHistory.UserID = %s - - UNION ALL - - SELECT - YouTubeVideos.VideoID as episodeid, - NULL as listendate, - YouTubeVideos.ListenPosition as listenduration, - YouTubeVideos.VideoTitle as episodetitle, - YouTubeVideos.VideoDescription as episodedescription, - YouTubeVideos.ThumbnailURL as episodeartwork, - YouTubeVideos.VideoURL as episodeurl, - YouTubeVideos.Duration as episodeduration, - Podcasts.PodcastName as podcastname, - YouTubeVideos.PublishedAt as episodepubdate, - YouTubeVideos.Completed as completed, - TRUE as is_youtube - FROM YouTubeVideos - JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - WHERE YouTubeVideos.ListenPosition > 0 - AND Podcasts.UserID = %s - ) combined - ORDER BY listendate DESC - """ - - cursor.execute(query, (user_id, user_id)) - results = cursor.fetchall() - if not results: - logging.info("No results found for user history.") - return [] - - # Get column descriptions - columns = [col[0].lower() for col in cursor.description] - - # Convert results to list of dictionaries - history_episodes = [] - for row in results: - episode = {} - if isinstance(row, tuple): - for idx, column_name in enumerate(columns): - value = row[idx] - if column_name in ['completed', 'is_youtube']: - value = bool(value) - episode[column_name] = value - elif isinstance(row, dict): - for k, v in row.items(): - column_name = k.lower() - value = v - if column_name in 
['completed', 'is_youtube']: - value = bool(value) - episode[column_name] = value - else: - logging.error(f"Unexpected row type: {type(row)}") - history_episodes.append(episode) - - return lowercase_keys(history_episodes) - - except Exception as e: - logging.error(f"Error executing user_history query: {e}") - raise - finally: - cursor.close() - -def download_podcast(cnx, database_type, episode_id, user_id, task_id=None, progress_callback=None): - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - print('download podcast is running') - """ - Download a podcast episode with progress tracking. - - Args: - cnx: Database connection - database_type: Type of database (postgresql or mysql) - episode_id: ID of the episode to download - user_id: ID of the user requesting the download - task_id: Optional Celery task ID for progress tracking - progress_callback: Optional callback function to report progress (fn(progress, status)) - - Returns: - bool: True if successful, False otherwise - """ - cursor = None - temp_file = None - - try: - # Import task-specific modules inside function to avoid circular imports - if task_id: - from database_functions.tasks import download_manager - - cursor = cnx.cursor() - - # First, check if already downloaded to avoid duplicate work - if database_type == "postgresql": - query = 'SELECT 1 FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s' - else: - query = "SELECT 1 FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s" - - cursor.execute(query, (episode_id, user_id)) - if cursor.fetchone(): - logger.info(f"Episode {episode_id} already downloaded for user {user_id}") - # Update task progress to 100% if task_id is provided - if task_id: - download_manager.update_task(task_id, 100.0, "SUCCESS") - if progress_callback: - progress_callback(100.0, "SUCCESS") - return True - - # Get episode details - if database_type == "postgresql": - query = ''' - SELECT - e.EpisodeID, - e.PodcastID, - 
e.EpisodeTitle, - e.EpisodePubDate, - e.EpisodeURL, - e.EpisodeDescription, - e.EpisodeArtwork, - p.PodcastName, - p.Author, - p.ArtworkURL - FROM "Episodes" e - JOIN "Podcasts" p ON e.PodcastID = p.PodcastID - WHERE e.EpisodeID = %s - ''' - else: - query = ''' - SELECT - e.EpisodeID, - e.PodcastID, - e.EpisodeTitle, - e.EpisodePubDate, - e.EpisodeURL, - e.EpisodeDescription, - e.EpisodeArtwork, - p.PodcastName, - p.Author, - p.ArtworkURL - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE e.EpisodeID = %s - ''' - - cursor.execute(query, (episode_id,)) - result = cursor.fetchone() - - if result is None: - logger.error(f"Episode {episode_id} not found") - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - return False - - # Extract episode details - if isinstance(result, dict): - episode_url = result.get('episodeurl') or result.get('EpisodeURL') - podcast_name = result.get('podcastname') or result.get('PodcastName') - episode_title = result.get('episodetitle') or result.get('EpisodeTitle') - pub_date = result.get('episodepubdate') or result.get('EpisodePubDate') - author = result.get('author') or result.get('Author') - episode_artwork = result.get('episodeartwork') or result.get('EpisodeArtwork') - artwork_url = result.get('artworkurl') or result.get('ArtworkURL') - else: - # Match positions from SELECT query - episode_url = result[4] # EpisodeURL - podcast_name = result[7] # PodcastName - episode_title = result[2] # EpisodeTitle - pub_date = result[3] # EpisodePubDate - author = result[8] # Author - episode_artwork = result[6] # EpisodeArtwork - artwork_url = result[9] # ArtworkURL - - # Update task progress if task_id is provided - if task_id: - download_manager.update_task(task_id, 5.0, "STARTED") - if progress_callback: - progress_callback(5.0, "STARTED") - - # Get user's time and date preferences - timezone, time_format, date_format = get_time_info(database_type, 
cnx, user_id) - - # Use default format if user preferences aren't set - if not date_format: - date_format = "ISO" - - # Format the publication date based on user preference - date_format_map = { - "ISO": "%Y-%m-%d", - "USA": "%m/%d/%Y", - "EUR": "%d.%m.%Y", - "JIS": "%Y-%m-%d", - "MDY": "%m-%d-%Y", - "DMY": "%d-%m-%Y", - "YMD": "%Y-%m-%d", - } - - date_format_str = date_format_map.get(date_format, "%Y-%m-%d") - filename_date_format_str = date_format_str.replace('/', '-').replace('\\', '-') - pub_date_str = pub_date.strftime(filename_date_format_str) - - - # Clean filenames of invalid characters - podcast_name = "".join(c for c in podcast_name if c.isalnum() or c in (' ', '-', '_')).strip() - episode_title = "".join(c for c in episode_title if c.isalnum() or c in (' ', '-', '_')).strip() - - # Create the download directory - download_dir = os.path.join("/opt/pinepods/downloads", podcast_name) - os.makedirs(download_dir, exist_ok=True) - uid = int(os.environ.get('PUID', 1000)) - gid = int(os.environ.get('PGID', 1000)) - os.chown(download_dir, uid, gid) - - # Generate filename with enhanced details - filename = f"{pub_date_str}_{episode_title}_{user_id}-{episode_id}.mp3" - file_path = os.path.join(download_dir, filename) - - # Check if file already exists - if os.path.exists(file_path): - # File exists but not in database, add the database entry - downloaded_date = datetime.datetime.fromtimestamp(os.path.getctime(file_path)) - file_size = os.path.getsize(file_path) - - if database_type == "postgresql": - query = ''' - INSERT INTO "DownloadedEpisodes" - (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - else: - query = ''' - INSERT INTO DownloadedEpisodes - (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - - cursor.execute(query, (user_id, episode_id, downloaded_date, file_size, file_path)) - cnx.commit() - - if task_id: - 
download_manager.update_task(task_id, 100.0, "SUCCESS") - if progress_callback: - progress_callback(100.0, "SUCCESS") - - logger.info(f"File already exists, added to database: {file_path}") - return True - - # Create a temporary file for download - temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.mp3') - temp_path = temp_file.name - temp_file.close() - - if task_id: - download_manager.update_task(task_id, 10.0, "DOWNLOADING") - if progress_callback: - progress_callback(10.0, "DOWNLOADING") - - # Download the file with progress tracking - logger.info(f"Starting download of episode {episode_id} from {episode_url}") - - try: - headers = { - 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36', - 'Referer': 'https://www.buzzsprout.com/', - 'Accept': '*/*', - 'Accept-Language': 'en-US,en;q=0.9', - 'Accept-Encoding': 'gzip, deflate, br' - } - - with requests.get(episode_url, stream=True, headers=headers) as response: - response.raise_for_status() - downloaded_date = datetime.datetime.now() - file_size = int(response.headers.get("Content-Length", 0)) - - # Stream the download to temporary file with progress tracking - downloaded_bytes = 0 - with open(temp_path, "wb") as f: - for chunk in response.iter_content(chunk_size=8192): - if chunk: - f.write(chunk) - downloaded_bytes += len(chunk) - - # Update progress every ~5% if file size is known - if file_size > 0: - progress = (downloaded_bytes / file_size) * 100 - # Only update at certain intervals to reduce overhead - if downloaded_bytes % (file_size // 20 + 1) < 8192: # ~5% intervals - download_progress = 10.0 + (progress * 0.8) # Scale to 10-90% - if task_id: - download_manager.update_task(task_id, download_progress, "DOWNLOADING") - if progress_callback: - progress_callback(download_progress, "DOWNLOADING") - except Exception as e: - logger.error(f"Failed to download episode {episode_id}: {str(e)}") - if task_id: - 
download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - - # Clean up temp file - if os.path.exists(temp_path): - os.unlink(temp_path) - - raise - - if task_id: - download_manager.update_task(task_id, 90.0, "FINALIZING") - if progress_callback: - progress_callback(90.0, "FINALIZING") - - print(f"DEBUG - Moving temp file from: {temp_path}") - print(f"DEBUG - Moving to destination: {file_path}") - print(f"DEBUG - Directory exists check: {os.path.exists(os.path.dirname(file_path))}") - print(f"DEBUG - Date format being used: {date_format} -> {date_format_str}") - print(f"DEBUG - Formatted date: {pub_date_str}") - - # Move the temporary file to the final location - shutil.move(temp_path, file_path) - - # Set permissions - os.chown(file_path, uid, gid) - - # Add metadata to the file - metadata = { - 'title': episode_title, - 'artist': author, - 'album': podcast_name, - 'date': pub_date_str, - 'artwork_url': episode_artwork or artwork_url - } - - try: - from database_functions import mp3_metadata - mp3_metadata.add_podcast_metadata(file_path, metadata) - except Exception as e: - logger.warning(f"Failed to add metadata to {file_path}: {e}") - - # Update database - if database_type == "postgresql": - query = ''' - INSERT INTO "DownloadedEpisodes" - (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - else: - query = ''' - INSERT INTO DownloadedEpisodes - (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - - cursor.execute(query, (user_id, episode_id, downloaded_date, file_size, file_path)) - - # Update download count - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s' - else: - query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - - 
if task_id: - download_manager.update_task(task_id, 100.0, "SUCCESS") - if progress_callback: - progress_callback(100.0, "SUCCESS") - - logger.info(f"Successfully downloaded episode {episode_id} to {file_path}") - return True - - except requests.RequestException as e: - logger.error(f"Network error downloading episode {episode_id}: {e}") - if cursor: - cnx.rollback() - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - return False - except Exception as e: - logger.error(f"Error downloading episode {episode_id}: {e}", exc_info=True) - if cursor: - cnx.rollback() - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - return False - finally: - if cursor: - cursor.close() - # Clean up temporary file if it exists and wasn't moved - if temp_file and os.path.exists(temp_file.name): - try: - os.unlink(temp_file.name) - except: - pass - -def get_episode_ids_for_podcast(cnx, database_type, podcast_id): - """ - Get episode IDs and titles for a podcast. - Handles both PostgreSQL and MariaDB/MySQL return types. - PostgreSQL uses lowercase column names, MariaDB uses uppercase. 
- """ - cursor = cnx.cursor() - print(f"Database type: {database_type}") - - if database_type == "postgresql": - # In PostgreSQL, table names are capitalized but column names are lowercase - query = 'SELECT "episodeid", "episodetitle" FROM "Episodes" WHERE "podcastid" = %s' - else: # MySQL or MariaDB - query = "SELECT EpisodeID, EpisodeTitle FROM Episodes WHERE PodcastID = %s" - - cursor.execute(query, (podcast_id,)) - results = cursor.fetchall() - print(f"Raw query results (first 3): {results[:3]}") - - episodes = [] - for row in results: - # Handle different return types from different database drivers - if isinstance(row, dict): - # Dictionary return (sometimes from MariaDB) - if "episodeid" in row: # PostgreSQL lowercase keys - episode_id = row["episodeid"] - episode_title = row.get("episodetitle", "") - else: # MariaDB uppercase keys - episode_id = row["EpisodeID"] - episode_title = row.get("EpisodeTitle", "") - else: - # Tuple return (most common from PostgreSQL) - episode_id = row[0] - episode_title = row[1] if len(row) > 1 else "" - - # Check for None, empty string, or 'None' string - if not episode_title or episode_title == 'None': - # Get a real episode title from the database if possible - title_query = ( - 'SELECT "episodetitle" FROM "Episodes" WHERE "episodeid" = %s' - if database_type == "postgresql" - else "SELECT EpisodeTitle FROM Episodes WHERE EpisodeID = %s" - ) - cursor.execute(title_query, (episode_id,)) - title_result = cursor.fetchone() - - if title_result and title_result[0]: - episode_title = title_result[0] - else: - # Look up the title by podcast name + episode number if we can - ordinal_query = ( - 'SELECT p."podcastname", COUNT(*) as episode_num FROM "Episodes" e ' - 'JOIN "Podcasts" p ON e."podcastid" = p."podcastid" ' - 'WHERE p."podcastid" = %s AND e."episodeid" <= %s ' - 'GROUP BY p."podcastname"' - if database_type == "postgresql" - else "SELECT p.PodcastName, COUNT(*) as episode_num FROM Episodes e " - "JOIN Podcasts p ON 
e.PodcastID = p.PodcastID " - "WHERE p.PodcastID = %s AND e.EpisodeID <= %s " - "GROUP BY p.PodcastName" - ) - cursor.execute(ordinal_query, (podcast_id, episode_id)) - ordinal_result = cursor.fetchone() - - if ordinal_result and len(ordinal_result) >= 2: - podcast_name = ordinal_result[0] - episode_num = ordinal_result[1] - episode_title = f"{podcast_name} - Episode {episode_num}" - else: - # Last resort fallback - episode_title = f"Episode #{episode_id}" - - episodes.append({"id": episode_id, "title": episode_title}) - - print(f"Processed episodes (first 3): {episodes[:3]}") - cursor.close() - return episodes - -def get_video_ids_for_podcast(cnx, database_type, podcast_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT VideoID - FROM "YouTubeVideos" - WHERE PodcastID = %s - ORDER BY PublishedAt DESC - """ - else: - query = """ - SELECT VideoID - FROM YouTubeVideos - WHERE PodcastID = %s - ORDER BY PublishedAt DESC - """ - - cursor.execute(query, (podcast_id,)) - results = cursor.fetchall() - - # Extract the video IDs, handling both tuple and dict results - video_ids = [row[0] if isinstance(row, tuple) else row['videoid'] for row in results] - return video_ids - - finally: - cursor.close() - -def get_podcast_index_id(cnx, database_type, podcast_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = 'SELECT PodcastIndexID FROM "Podcasts" WHERE PodcastID = %s' - else: # MySQL or MariaDB - query = "SELECT PodcastIndexID FROM Podcasts WHERE PodcastID = %s" - - cursor.execute(query, (podcast_id,)) - result = cursor.fetchone() - if result: - return result[0] if isinstance(result, tuple) else result.get("podcastindexid") - return None - finally: - cursor.close() - - -def download_youtube_video(cnx, database_type, video_id, user_id, task_id=None, progress_callback=None): - """ - Download a YouTube video with progress tracking. 
- - Args: - cnx: Database connection - database_type: Type of database (postgresql or mysql) - video_id: ID of the video to download - user_id: ID of the user requesting the download - task_id: Optional Celery task ID for progress tracking - progress_callback: Optional callback function to report progress (fn(progress, status)) - - Returns: - bool: True if successful, False otherwise - """ - cursor = None - - try: - # Import task-specific modules inside function to avoid circular imports - if task_id: - from database_functions.tasks import download_manager - - cursor = cnx.cursor() - - # Check if already downloaded - if database_type == "postgresql": - query = 'SELECT 1 FROM "DownloadedVideos" WHERE VideoID = %s AND UserID = %s' - else: - query = "SELECT 1 FROM DownloadedVideos WHERE VideoID = %s AND UserID = %s" - - cursor.execute(query, (video_id, user_id)) - if cursor.fetchone(): - # Update task progress to 100% if task_id is provided - if task_id: - download_manager.update_task(task_id, 100.0, "SUCCESS") - if progress_callback: - progress_callback(100.0, "SUCCESS") - return True - - # Update progress if task_id is provided - if task_id: - download_manager.update_task(task_id, 5.0, "STARTED") - if progress_callback: - progress_callback(5.0, "STARTED") - - # Get video details - if database_type == "postgresql": - query = ''' - SELECT - v.VideoID, - v.PodcastID, - v.VideoTitle, - v.PublishedAt, - v.VideoURL, - v.VideoDescription, - v.ThumbnailURL, - v.YouTubeVideoID, - p.PodcastName, - p.Author - FROM "YouTubeVideos" v - JOIN "Podcasts" p ON v.PodcastID = p.PodcastID - WHERE v.VideoID = %s - ''' - else: - query = ''' - SELECT - v.VideoID, - v.PodcastID, - v.VideoTitle, - v.PublishedAt, - v.VideoURL, - v.VideoDescription, - v.ThumbnailURL, - v.YouTubeVideoID, - p.PodcastName, - p.Author - FROM YouTubeVideos v - JOIN Podcasts p ON v.PodcastID = p.PodcastID - WHERE v.VideoID = %s - ''' - - cursor.execute(query, (video_id,)) - result = cursor.fetchone() - - if result 
is None: - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - return False - - # Extract values - if isinstance(result, dict): - youtube_video_id = result.get('youtubevideoid') or result.get('YouTubeVideoID') - video_title = result.get('videotitle') or result.get('VideoTitle') - pub_date = result.get('publishedat') or result.get('PublishedAt') - channel_name = result.get('podcastname') or result.get('PodcastName') - author = result.get('author') or result.get('Author') - else: - youtube_video_id = result[7] # YouTubeVideoID - video_title = result[2] # VideoTitle - pub_date = result[3] # PublishedAt - channel_name = result[8] # PodcastName - author = result[9] # Author - - if task_id: - download_manager.update_task(task_id, 10.0, "PROCESSING") - if progress_callback: - progress_callback(10.0, "PROCESSING") - - # Get user's time/date preferences and format date - timezone, time_format, date_format = get_time_info(database_type, cnx, user_id) - date_format = date_format or "ISO" - date_format_map = { - "ISO": "%Y-%m-%d", - "USA": "%m/%d/%Y", - "EUR": "%d.%m.%Y", - "JIS": "%Y-%m-%d", - "MDY": "%m-%d-%Y", - "DMY": "%d-%m-%Y", - "YMD": "%Y-%m-%d", - } - date_format_str = date_format_map.get(date_format, "%Y-%m-%d") - filename_date_format_str = date_format_str.replace('/', '-').replace('\\', '-') - pub_date_str = pub_date.strftime(filename_date_format_str) - - # Clean filenames - channel_name = "".join(c for c in channel_name if c.isalnum() or c in (' ', '-', '_')).strip() - video_title = "".join(c for c in video_title if c.isalnum() or c in (' ', '-', '_')).strip() - - # Source and destination paths - source_path = f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3" - if not os.path.exists(source_path): - source_path = f"{source_path}.mp3" # Try with double extension - if not os.path.exists(source_path): - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if 
progress_callback: - progress_callback(0.0, "FAILED") - return False - - if task_id: - download_manager.update_task(task_id, 30.0, "PREPARING_DESTINATION") - if progress_callback: - progress_callback(30.0, "PREPARING_DESTINATION") - - # Create destination directory - download_dir = os.path.join("/opt/pinepods/downloads", channel_name) - os.makedirs(download_dir, exist_ok=True) - - # Set proper file permissions - uid = int(os.environ.get('PUID', 1000)) - gid = int(os.environ.get('PGID', 1000)) - os.chown(download_dir, uid, gid) - - # Generate destination filename - filename = f"{pub_date_str}_{video_title}_{user_id}-{video_id}.mp3" - dest_path = os.path.join(download_dir, filename) - - if task_id: - download_manager.update_task(task_id, 50.0, "DOWNLOADING") - if progress_callback: - progress_callback(50.0, "DOWNLOADING") - - # Copy file with progress tracking - try: - # Get source file size for progress tracking - source_size = os.path.getsize(source_path) - - # Use buffer-based copying to enable progress tracking - with open(source_path, 'rb') as src_file, open(dest_path, 'wb') as dest_file: - copied = 0 - buffer_size = 8192 # 8KB buffer - - while True: - buffer = src_file.read(buffer_size) - if not buffer: - break - - dest_file.write(buffer) - copied += len(buffer) - - if source_size > 0: - # Calculate progress (50-80% range for copying) - copy_progress = 50.0 + ((copied / source_size) * 30.0) - - # Update progress every ~5% to reduce overhead - if copied % (source_size // 20 + 1) < buffer_size: - if task_id: - download_manager.update_task(task_id, copy_progress, "DOWNLOADING") - if progress_callback: - progress_callback(copy_progress, "DOWNLOADING") - - except Exception as e: - if os.path.exists(dest_path): - os.unlink(dest_path) # Clean up incomplete file - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - progress_callback(0.0, "FAILED") - raise - - # Set proper permissions on destination file - os.chown(dest_path, 
uid, gid) - - if task_id: - download_manager.update_task(task_id, 80.0, "FINALIZING") - if progress_callback: - progress_callback(80.0, "FINALIZING") - - # Update metadata - try: - metadata = { - 'title': video_title, - 'artist': author, - 'album': channel_name, - 'date': pub_date_str - } - from database_functions import mp3_metadata - mp3_metadata.add_podcast_metadata(dest_path, metadata) - except Exception as e: - print(f"Failed to add metadata to {dest_path}: {e}") - # Continue despite metadata failure - - if task_id: - download_manager.update_task(task_id, 90.0, "UPDATING_DATABASE") - if progress_callback: - progress_callback(90.0, "UPDATING_DATABASE") - - # Record in database - file_size = os.path.getsize(dest_path) - downloaded_date = datetime.datetime.now() - - if database_type == "postgresql": - query = ''' - INSERT INTO "DownloadedVideos" - (UserID, VideoID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - else: - query = ''' - INSERT INTO DownloadedVideos - (UserID, VideoID, DownloadedDate, DownloadedSize, DownloadedLocation) - VALUES (%s, %s, %s, %s, %s) - ''' - - cursor.execute(query, (user_id, video_id, downloaded_date, file_size, dest_path)) - - # Update download count - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s' - else: - query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - - if task_id: - download_manager.update_task(task_id, 100.0, "SUCCESS") - if progress_callback: - progress_callback(100.0, "SUCCESS") - - print(f"Successfully downloaded YouTube video {video_id} to {dest_path}") - return True - - except Exception as e: - print(f"Error downloading YouTube video {video_id}: {str(e)}", exc_info=True) - if cursor: - cnx.rollback() - if task_id: - download_manager.update_task(task_id, 0.0, "FAILED") - if progress_callback: - 
progress_callback(0.0, "FAILED") - return False - finally: - if cursor: - cursor.close() - - - - -def get_podcast_id_from_episode(cnx, database_type, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - if is_youtube: - query = """ - SELECT "YouTubeVideos".PodcastID - FROM "YouTubeVideos" - INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s - """ - else: - query = """ - SELECT "Episodes".PodcastID - FROM "Episodes" - INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s - """ - else: # MySQL or MariaDB - if is_youtube: - query = """ - SELECT YouTubeVideos.PodcastID - FROM YouTubeVideos - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s - """ - else: - query = """ - SELECT Episodes.PodcastID - FROM Episodes - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s - """ - - # First try with provided user_id - cursor.execute(query, (episode_id, user_id)) - result = cursor.fetchone() - - # If not found, try with system user (1) - if not result: - cursor.execute(query, (episode_id, 1)) - result = cursor.fetchone() - - if result: - return result[0] if isinstance(result, tuple) else result.get("podcastid") - return None - except Exception as e: - logging.error(f"Error in get_podcast_id_from_episode: {str(e)}") - return None - finally: - cursor.close() - -def get_podcast_id_from_episode_name(cnx, database_type, episode_name, episode_url, user_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT podcast_id FROM ( - SELECT "Episodes".PodcastID as podcast_id - FROM "Episodes" - INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - WHERE "Episodes".EpisodeTitle = 
%s - AND "Episodes".EpisodeURL = %s - AND "Podcasts".UserID = %s - - UNION - - SELECT "YouTubeVideos".PodcastID as podcast_id - FROM "YouTubeVideos" - INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - WHERE "YouTubeVideos".VideoTitle = %s - AND "YouTubeVideos".VideoURL = %s - AND "Podcasts".UserID = %s - ) combined_results - LIMIT 1 - """ - # Pass the parameters twice because we're using them in both parts of the UNION - cursor.execute(query, (episode_name, episode_url, user_id, episode_name, episode_url, user_id)) - else: # MySQL or MariaDB - query = """ - SELECT podcast_id FROM ( - SELECT Episodes.PodcastID as podcast_id - FROM Episodes - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - WHERE Episodes.EpisodeTitle = %s - AND Episodes.EpisodeURL = %s - AND Podcasts.UserID = %s - - UNION - - SELECT YouTubeVideos.PodcastID as podcast_id - FROM YouTubeVideos - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - WHERE YouTubeVideos.VideoTitle = %s - AND YouTubeVideos.VideoURL = %s - AND Podcasts.UserID = %s - ) combined_results - LIMIT 1 - """ - cursor.execute(query, (episode_name, episode_url, user_id, episode_name, episode_url, user_id)) - - result = cursor.fetchone() - if result: - return result[0] if isinstance(result, tuple) else result.get("podcast_id") - return None - except Exception as e: - logging.error(f"Error in get_podcast_id_from_episode_name: {str(e)}") - return None - finally: - cursor.close() - - -def mark_episode_completed(cnx, database_type, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - if is_youtube: - # Handle YouTube video - if database_type == "postgresql": - duration_query = 'SELECT Duration FROM "YouTubeVideos" WHERE VideoID = %s' - update_query = 'UPDATE "YouTubeVideos" SET Completed = TRUE WHERE VideoID = %s' - history_query = ''' - INSERT INTO "UserVideoHistory" (UserID, VideoID, ListenDate, ListenDuration) - VALUES (%s, %s, NOW(), %s) - ON CONFLICT 
(UserID, VideoID) - DO UPDATE SET ListenDuration = %s, ListenDate = NOW() - ''' - else: - duration_query = "SELECT Duration FROM YouTubeVideos WHERE VideoID = %s" - update_query = "UPDATE YouTubeVideos SET Completed = 1 WHERE VideoID = %s" - history_query = ''' - INSERT INTO UserVideoHistory (UserID, VideoID, ListenDate, ListenDuration) - VALUES (%s, %s, NOW(), %s) - ON DUPLICATE KEY UPDATE - ListenDuration = %s, - ListenDate = NOW() - ''' - else: - # Original episode logic - if database_type == "postgresql": - duration_query = 'SELECT EpisodeDuration FROM "Episodes" WHERE EpisodeID = %s' - update_query = 'UPDATE "Episodes" SET Completed = TRUE WHERE EpisodeID = %s' - history_query = ''' - INSERT INTO "UserEpisodeHistory" (UserID, EpisodeID, ListenDate, ListenDuration) - VALUES (%s, %s, NOW(), %s) - ON CONFLICT (UserID, EpisodeID) - DO UPDATE SET ListenDuration = %s, ListenDate = NOW() - ''' - else: - duration_query = "SELECT EpisodeDuration FROM Episodes WHERE EpisodeID = %s" - update_query = "UPDATE Episodes SET Completed = 1 WHERE EpisodeID = %s" - history_query = ''' - INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDate, ListenDuration) - VALUES (%s, %s, NOW(), %s) - ON DUPLICATE KEY UPDATE - ListenDuration = %s, - ListenDate = NOW() - ''' - - # Get duration - cursor.execute(duration_query, (episode_id,)) - duration_result = cursor.fetchone() - if duration_result: - if isinstance(duration_result, dict): - duration = duration_result['episodeduration' if not is_youtube else 'duration'] - else: # tuple - duration = duration_result[0] - else: - duration = None - - if duration: - # Update completion status - cursor.execute(update_query, (episode_id,)) - - # Update history - history_params = (user_id, episode_id, duration, duration) - cursor.execute(history_query, history_params) - - cnx.commit() - except Exception as e: - cnx.rollback() - print(f"Error in mark_episode_completed: {str(e)}") - raise e - finally: - cursor.close() - -def 
mark_episode_uncompleted(cnx, database_type, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - if is_youtube: - # Handle YouTube video - if database_type == "postgresql": - update_query = 'UPDATE "YouTubeVideos" SET Completed = FALSE WHERE VideoID = %s' - history_query = ''' - UPDATE "UserVideoHistory" - SET ListenDuration = 0, ListenDate = NOW() - WHERE UserID = %s AND VideoID = %s - ''' - else: - update_query = "UPDATE YouTubeVideos SET Completed = 0 WHERE VideoID = %s" - history_query = ''' - UPDATE UserVideoHistory - SET ListenDuration = 0, ListenDate = NOW() - WHERE UserID = %s AND VideoID = %s - ''' - else: - # Original episode logic - if database_type == "postgresql": - update_query = 'UPDATE "Episodes" SET Completed = FALSE WHERE EpisodeID = %s' - history_query = ''' - UPDATE "UserEpisodeHistory" - SET ListenDuration = 0, ListenDate = NOW() - WHERE UserID = %s AND EpisodeID = %s - ''' - else: - update_query = "UPDATE Episodes SET Completed = 0 WHERE EpisodeID = %s" - history_query = ''' - UPDATE UserEpisodeHistory - SET ListenDuration = 0, ListenDate = NOW() - WHERE UserID = %s AND EpisodeID = %s - ''' - - cursor.execute(update_query, (episode_id,)) - cursor.execute(history_query, (user_id, episode_id)) - cnx.commit() - finally: - cursor.close() - - -def enable_auto_download(cnx, database_type, podcast_id, user_id, auto_download): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = 'UPDATE "Podcasts" SET AutoDownload = %s WHERE PodcastID = %s AND UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE Podcasts SET AutoDownload = %s WHERE PodcastID = %s AND UserID = %s" - cursor.execute(query, (auto_download, podcast_id, user_id)) - cnx.commit() - except Exception as e: - cnx.rollback() - raise e - finally: - cursor.close() - -def set_feed_cutoff(cnx, database_type, podcast_id, user_id, feed_cutoff): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = 'UPDATE "Podcasts" SET 
def call_get_auto_download_status(cnx, database_type, podcast_id, user_id):
    """Return the AutoDownload value for the user's podcast, or None if absent."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT AutoDownload FROM "Podcasts" WHERE PodcastID = %s AND UserID = %s'
        else:  # MySQL or MariaDB
            query = "SELECT AutoDownload FROM Podcasts WHERE PodcastID = %s AND UserID = %s"

        cursor.execute(query, (podcast_id, user_id))
        result = cursor.fetchone()

        if result:
            return result[0] if isinstance(result, tuple) else result.get("autodownload")
        else:
            return None
    finally:
        cursor.close()


def set_playback_speed_podcast(cnx, database_type: str, podcast_id: int, playback_speed: float):
    """Set a per-podcast playback speed and mark it as customized.

    Commits on success; rolls back and re-raises on error.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Podcasts" SET PlaybackSpeed = %s, PlaybackSpeedCustomized = TRUE WHERE PodcastID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE Podcasts SET PlaybackSpeed = %s, PlaybackSpeedCustomized = TRUE WHERE PodcastID = %s"

        cursor.execute(query, (playback_speed, podcast_id))
        cnx.commit()
    except Exception as e:
        cnx.rollback()
        raise e
    finally:
        cursor.close()


def set_playback_speed_user(cnx, database_type: str, user_id: int, playback_speed: float):
    """Set the user's default playback speed.

    Commits on success; rolls back and re-raises on error.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Users" SET PlaybackSpeed = %s WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE Users SET PlaybackSpeed = %s WHERE UserID = %s"
        cursor.execute(query, (playback_speed, user_id))
        cnx.commit()
    except Exception as e:
        cnx.rollback()
        raise e
    finally:
        cursor.close()


def adjust_skip_times(cnx, database_type, podcast_id, start_skip, end_skip):
    """Set the intro/outro skip seconds for a podcast.

    Commits on success; rolls back and re-raises on error.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Podcasts" SET StartSkip = %s, EndSkip = %s WHERE PodcastID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE Podcasts SET StartSkip = %s, EndSkip = %s WHERE PodcastID = %s"
        cursor.execute(query, (start_skip, end_skip, podcast_id))
        cnx.commit()
    except Exception as e:
        cnx.rollback()
        raise e
    finally:
        cursor.close()


def get_auto_skip_times(cnx, database_type, podcast_id, user_id):
    """Return (StartSkip, EndSkip) for the user's podcast.

    Returns (0, 0) when the user is not subscribed to the podcast (no row).
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT StartSkip, EndSkip
                FROM "Podcasts"
                WHERE PodcastID = %s AND UserID = %s
            """
        else:
            query = """
                SELECT StartSkip, EndSkip
                FROM Podcasts
                WHERE PodcastID = %s AND UserID = %s
            """

        cursor.execute(query, (podcast_id, user_id))
        result = cursor.fetchone()

        if result:
            if isinstance(result, dict):
                return result.get("startskip"), result.get("endskip")
            elif isinstance(result, tuple):
                return result[0], result[1]

        # If no result found (user isn't subscribed), return default values
        return 0, 0
    finally:
        cursor.close()


def get_play_episode_details(cnx, database_type: str, user_id: int, podcast_id: int, is_youtube: bool = False):
    """Resolve effective playback settings for playing an episode.

    Returns (playback_speed, start_skip, end_skip).  The user's default speed
    is used unless the podcast row has PlaybackSpeedCustomized set, in which
    case the podcast's own speed wins.  Missing rows fall back to
    (user speed or 1.0, 0, 0).
    """
    cursor = cnx.cursor(dictionary=True) if database_type != "postgresql" else cnx.cursor()
    try:
        # First get the user's default playback speed
        if database_type == "postgresql":
            user_query = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s'
        else:
            user_query = 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s'

        cursor.execute(user_query, (user_id,))
        user_result = cursor.fetchone()
        user_playback_speed = 1.0  # Default fallback

        if user_result:
            if isinstance(user_result, dict):
                # Case-insensitive lookup: row factories differ in key casing
                for key in user_result:
                    if key.lower() == 'playbackspeed':
                        user_playback_speed = user_result[key] if user_result[key] is not None else 1.0
                        break
            else:  # Tuple
                user_playback_speed = user_result[0] if user_result[0] is not None else 1.0

        # Now get podcast-specific settings
        if database_type == "postgresql":
            podcast_query = '''
                SELECT PlaybackSpeed, PlaybackSpeedCustomized, StartSkip, EndSkip
                FROM "Podcasts"
                WHERE PodcastID = %s AND UserID = %s
            '''
        else:
            podcast_query = '''
                SELECT PlaybackSpeed, PlaybackSpeedCustomized, StartSkip, EndSkip
                FROM Podcasts
                WHERE PodcastID = %s AND UserID = %s
            '''

        cursor.execute(podcast_query, (podcast_id, user_id))
        podcast_result = cursor.fetchone()

        # Default values.
        # FIX: is_customized/podcast_speed were previously only bound inside the
        # key-matching loop, so a row missing an expected key (e.g. a different
        # row factory's casing) raised NameError at `if is_customized`.
        start_skip = 0
        end_skip = 0
        is_customized = False
        podcast_speed = user_playback_speed
        final_playback_speed = user_playback_speed  # Default to user's preference

        if podcast_result:
            if isinstance(podcast_result, dict):
                # Case-insensitive lookup for dictionaries
                for key in podcast_result:
                    lower_key = key.lower()
                    if lower_key == 'startskip':
                        start_skip = podcast_result[key] if podcast_result[key] is not None else 0
                    elif lower_key == 'endskip':
                        end_skip = podcast_result[key] if podcast_result[key] is not None else 0
                    elif lower_key == 'playbackspeedcustomized':
                        is_customized = podcast_result[key]
                    elif lower_key == 'playbackspeed':
                        podcast_speed = podcast_result[key] if podcast_result[key] is not None else 1.0
            else:  # Tuple result: (PlaybackSpeed, Customized, StartSkip, EndSkip)
                start_skip = podcast_result[2] if podcast_result[2] is not None else 0
                end_skip = podcast_result[3] if podcast_result[3] is not None else 0
                is_customized = podcast_result[1]
                podcast_speed = podcast_result[0] if podcast_result[0] is not None else 1.0

            # Use podcast's playback speed only if it's been customized
            if is_customized:
                final_playback_speed = podcast_speed

        return final_playback_speed, start_skip, end_skip

    finally:
        cursor.close()


def clear_podcast_playback_speed(cnx, database_type: str, podcast_id: int, user_id: int) -> bool:
    """Reset a podcast's playback speed to 1.0 and clear the customized flag.

    Returns True on success, False on any error (error is printed, not raised).
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = '''
                UPDATE "Podcasts"
                SET PlaybackSpeed = 1.0, PlaybackSpeedCustomized = FALSE
                WHERE PodcastID = %s AND UserID = %s
            '''
        else:
            query = '''
                UPDATE Podcasts
                SET PlaybackSpeed = 1.0, PlaybackSpeedCustomized = FALSE
                WHERE PodcastID = %s AND UserID = %s
            '''
        cursor.execute(query, (podcast_id, user_id))
        cnx.commit()
        return True
    except Exception as e:
        print(f"Error clearing podcast playback speed: {e}")
        return False
    finally:
        cursor.close()
def check_downloaded(cnx, database_type, user_id, content_id, is_youtube=False):
    """Return True if the user has a download record for the episode/video."""
    cur = cnx.cursor()

    if is_youtube:
        if database_type == "postgresql":
            query = 'SELECT 1 FROM "DownloadedVideos" WHERE VideoID = %s AND UserID = %s'
        else:
            query = "SELECT 1 FROM DownloadedVideos WHERE VideoID = %s AND UserID = %s"
    else:
        if database_type == "postgresql":
            query = 'SELECT 1 FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s'
        else:
            query = "SELECT 1 FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s"

    cur.execute(query, (content_id, user_id))
    found = cur.fetchone() is not None
    cur.close()
    return found


def get_download_value(result, key, default=None):
    """
    Helper function to extract value from result set.
    It handles both dictionaries and tuples.
    """
    key_lower = key.lower()
    if isinstance(result, dict):
        return result.get(key_lower, default)
    elif isinstance(result, tuple):
        # Define a mapping of field names to their tuple indices for your specific queries
        key_map = {
            "downloadedlocation": 0
        }
        index = key_map.get(key_lower)
        return result[index] if index is not None else default
    return default


def get_youtube_video_location(cnx, database_type, episode_id, user_id):
    """Return the on-disk path of a downloaded YouTube video's audio file.

    Looks up the video's YouTube ID (scoped to the user's podcasts), then
    checks /opt/pinepods/downloads/youtube for '<id>.mp3' and the historical
    double-suffixed '<id>.mp3.mp3'.  Returns None when no row or file exists,
    or on any error (errors are logged).
    """
    cur = cnx.cursor()
    try:
        logging.info(f"Looking up YouTube video location for episode_id: {episode_id}, user_id: {user_id}")

        if database_type == "postgresql":
            query = '''
                SELECT "YouTubeVideos"."youtubevideoid"
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos"."podcastid" = "Podcasts"."podcastid"
                WHERE "YouTubeVideos"."videoid" = %s AND "Podcasts"."userid" = %s
            '''
        else:
            query = '''
                SELECT YouTubeVideos.YouTubeVideoID
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s
            '''

        cur.execute(query, (episode_id, user_id))
        row = cur.fetchone()

        logging.info(f"Query result: {row}")

        if row:
            # Handle both dict and tuple results
            youtube_id = row['youtubevideoid'] if isinstance(row, dict) else row[0]
            logging.info(f"Found YouTube ID: {youtube_id}")

            file_path = os.path.join('/opt/pinepods/downloads/youtube', f'{youtube_id}.mp3')
            file_path_double = os.path.join('/opt/pinepods/downloads/youtube', f'{youtube_id}.mp3.mp3')

            logging.info(f"Checking paths: {file_path} and {file_path_double}")

            if os.path.exists(file_path):
                logging.info(f"Found file at {file_path}")
                return file_path
            elif os.path.exists(file_path_double):
                logging.info(f"Found file at {file_path_double}")
                return file_path_double
            else:
                logging.info("No file found at either path")
        else:
            logging.info("No YouTube video found in database")

        return None
    except Exception as e:
        logging.error(f"Error retrieving YouTube video location: {e}")
        import traceback
        logging.error(f"Traceback: {traceback.format_exc()}")
        return None
    finally:
        cur.close()


def get_download_location(cnx, database_type, episode_id, user_id):
    """Return the stored DownloadedLocation for the user's downloaded episode.

    Returns None when no download row exists or on error (error is logged).
    """
    cur = cnx.cursor()
    try:
        # Check if the episode has been downloaded by the user
        if database_type == "postgresql":
            query = 'SELECT DownloadedLocation FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s'
        else:
            query = "SELECT DownloadedLocation FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s"

        cur.execute(query, (episode_id, user_id))
        row = cur.fetchone()

        if row:
            location = get_download_value(row, "DownloadedLocation")
            print(f"DownloadedLocation found: {location}")
            return location

        print("No DownloadedLocation found for the given EpisodeID and UserID")
        return None

    except Exception as e:
        logging.error(f"Error retrieving DownloadedLocation: {e}")
        return None

    finally:
        cur.close()


def download_episode_list(database_type, cnx, user_id):
    """Return the user's downloads (episodes + YouTube videos), newest first.

    Each row carries podcast metadata, saved/queued flags, and listen progress.
    Returns None when the user has no downloads.  NOTE(review): this sets
    cnx.row_factory permanently on the PostgreSQL connection.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:
        cursor = cnx.cursor(dictionary=True)

    if database_type == "postgresql":
        query = """
            SELECT * FROM (
                SELECT
                    "Podcasts".PodcastID as podcastid,
                    "Podcasts".PodcastName as podcastname,
                    "Podcasts".ArtworkURL as artworkurl,
                    "Episodes".EpisodeID as episodeid,
                    "Episodes".EpisodeTitle as episodetitle,
                    "Episodes".EpisodePubDate as episodepubdate,
                    "Episodes".EpisodeDescription as episodedescription,
                    "Episodes".EpisodeArtwork as episodeartwork,
                    "Episodes".EpisodeURL as episodeurl,
                    "Episodes".EpisodeDuration as episodeduration,
                    "Podcasts".PodcastIndexID as podcastindexid,
                    "Podcasts".WebsiteURL as websiteurl,
                    "DownloadedEpisodes".DownloadedLocation as downloadedlocation,
                    "UserEpisodeHistory".ListenDuration as listenduration,
                    "Episodes".Completed as completed,
                    CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
                    TRUE as downloaded,
                    FALSE as is_youtube
                FROM "DownloadedEpisodes"
                INNER JOIN "Episodes" ON "DownloadedEpisodes".EpisodeID = "Episodes".EpisodeID
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "DownloadedEpisodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "DownloadedEpisodes".UserID = "UserEpisodeHistory".UserID
                LEFT JOIN "SavedEpisodes" ON
                    "DownloadedEpisodes".EpisodeID = "SavedEpisodes".EpisodeID
                    AND "SavedEpisodes".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "DownloadedEpisodes".EpisodeID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = FALSE
                WHERE "DownloadedEpisodes".UserID = %s

                UNION ALL

                SELECT
                    "Podcasts".PodcastID as podcastid,
                    "Podcasts".PodcastName as podcastname,
                    "Podcasts".ArtworkURL as artworkurl,
                    "YouTubeVideos".VideoID as episodeid,
                    "YouTubeVideos".VideoTitle as episodetitle,
                    "YouTubeVideos".PublishedAt as episodepubdate,
                    "YouTubeVideos".VideoDescription as episodedescription,
                    "YouTubeVideos".ThumbnailURL as episodeartwork,
                    "YouTubeVideos".VideoURL as episodeurl,
                    "YouTubeVideos".Duration as episodeduration,
                    "Podcasts".PodcastIndexID as podcastindexid,
                    "Podcasts".WebsiteURL as websiteurl,
                    "DownloadedVideos".DownloadedLocation as downloadedlocation,
                    "YouTubeVideos".ListenPosition as listenduration,
                    "YouTubeVideos".Completed as completed,
                    CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued,
                    TRUE as downloaded,
                    TRUE as is_youtube
                FROM "DownloadedVideos"
                INNER JOIN "YouTubeVideos" ON "DownloadedVideos".VideoID = "YouTubeVideos".VideoID
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "SavedVideos" ON
                    "DownloadedVideos".VideoID = "SavedVideos".VideoID
                    AND "SavedVideos".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "DownloadedVideos".VideoID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = TRUE
                WHERE "DownloadedVideos".UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """
    else:  # MySQL or MariaDB
        query = """
            SELECT * FROM (
                SELECT
                    Podcasts.PodcastID as podcastid,
                    Podcasts.PodcastName as podcastname,
                    Podcasts.ArtworkURL as artworkurl,
                    Episodes.EpisodeID as episodeid,
                    Episodes.EpisodeTitle as episodetitle,
                    Episodes.EpisodePubDate as episodepubdate,
                    Episodes.EpisodeDescription as episodedescription,
                    Episodes.EpisodeArtwork as episodeartwork,
                    Episodes.EpisodeURL as episodeurl,
                    Episodes.EpisodeDuration as episodeduration,
                    Podcasts.PodcastIndexID as podcastindexid,
                    Podcasts.WebsiteURL as websiteurl,
                    DownloadedEpisodes.DownloadedLocation as downloadedlocation,
                    UserEpisodeHistory.ListenDuration as listenduration,
                    Episodes.Completed as completed,
                    CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued,
                    1 as downloaded,
                    0 as is_youtube
                FROM DownloadedEpisodes
                INNER JOIN Episodes ON DownloadedEpisodes.EpisodeID = Episodes.EpisodeID
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    DownloadedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND DownloadedEpisodes.UserID = UserEpisodeHistory.UserID
                LEFT JOIN SavedEpisodes ON
                    DownloadedEpisodes.EpisodeID = SavedEpisodes.EpisodeID
                    AND SavedEpisodes.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    DownloadedEpisodes.EpisodeID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 0
                WHERE DownloadedEpisodes.UserID = %s

                UNION ALL

                SELECT
                    Podcasts.PodcastID as podcastid,
                    Podcasts.PodcastName as podcastname,
                    Podcasts.ArtworkURL as artworkurl,
                    YouTubeVideos.VideoID as episodeid,
                    YouTubeVideos.VideoTitle as episodetitle,
                    YouTubeVideos.PublishedAt as episodepubdate,
                    YouTubeVideos.VideoDescription as episodedescription,
                    YouTubeVideos.ThumbnailURL as episodeartwork,
                    YouTubeVideos.VideoURL as episodeurl,
                    YouTubeVideos.Duration as episodeduration,
                    Podcasts.PodcastIndexID as podcastindexid,
                    Podcasts.WebsiteURL as websiteurl,
                    DownloadedVideos.DownloadedLocation as downloadedlocation,
                    YouTubeVideos.ListenPosition as listenduration,
                    YouTubeVideos.Completed as completed,
                    CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued,
                    1 as downloaded,
                    1 as is_youtube
                FROM DownloadedVideos
                INNER JOIN YouTubeVideos ON DownloadedVideos.VideoID = YouTubeVideos.VideoID
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN SavedVideos ON
                    DownloadedVideos.VideoID = SavedVideos.VideoID
                    AND SavedVideos.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    DownloadedVideos.VideoID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 1
                WHERE DownloadedVideos.UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """

    # Now we need 6 parameters: 3 user_ids for each part of the UNION query
    cursor.execute(query, (user_id, user_id, user_id, user_id, user_id, user_id))
    rows = cursor.fetchall()
    cursor.close()

    if not rows:
        return None

    downloaded_episodes = lowercase_keys(rows)

    # MySQL returns 0/1 ints for the flag columns; normalize to real booleans
    if database_type != "postgresql":
        bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
        for episode in downloaded_episodes:
            for field in bool_fields:
                if field in episode:
                    episode[field] = bool(episode[field])
    return downloaded_episodes


def save_email_settings(cnx, database_type, email_settings):
    """Persist SMTP settings into the single EmailSettings row (ID 1).

    auth_required is stored as a boolean on PostgreSQL and as an int elsewhere.
    """
    cursor = cnx.cursor()

    if database_type == "postgresql":
        # Convert auth_required to boolean for PostgreSQL
        auth_required = bool(int(email_settings['auth_required']))
        query = (
            'UPDATE "EmailSettings" SET Server_Name = %s, Server_Port = %s, From_Email = %s, Send_Mode = %s, Encryption = %s, Auth_Required = %s, Username = %s, Password = %s WHERE EmailSettingsID = 1')
    else:
        # Keep auth_required as integer for other databases
        auth_required = int(email_settings['auth_required'])
        query = (
            "UPDATE EmailSettings SET Server_Name = %s, Server_Port = %s, From_Email = %s, Send_Mode = %s, Encryption = %s, Auth_Required = %s, Username = %s, Password = %s WHERE EmailSettingsID = 1")

    cursor.execute(query, (email_settings['server_name'], email_settings['server_port'], email_settings['from_email'],
                           email_settings['send_mode'], email_settings['encryption'],
                           auth_required, email_settings['email_username'],
                           email_settings['email_password']))

    cnx.commit()
    cursor.close()
def get_encryption_key(cnx, database_type):
    """Return the app's encryption key (AppSettings row 1) base64-encoded.

    Returns None when the settings row is missing.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = ('SELECT EncryptionKey FROM "AppSettings" WHERE AppSettingsID = 1')
    else:
        query = ("SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1")
    cursor.execute(query)
    result = cursor.fetchone()

    if not result:
        cursor.close()
        return None

    # Convert the result to a dictionary with lowercase keys, whatever the
    # cursor returned (tuple + description, or an already-dict row).
    result_dict = {}
    if isinstance(result, tuple):
        result_dict = {column[0].lower(): value for column, value in zip(cursor.description, result)}
    elif isinstance(result, dict):
        result_dict = {k.lower(): v for k, v in result.items()}

    cursor.close()

    # Convert the bytearray to a base64 encoded string before returning.
    return base64.b64encode(result_dict['encryptionkey']).decode()


def get_email_settings(cnx, database_type):
    """Return the EmailSettings row as a PascalCase-keyed dict, or None.

    On PostgreSQL, AuthRequired is normalized to 0/1 for parity with MySQL.
    """
    if database_type == "postgresql":
        cursor = cnx.cursor(row_factory=dict_row)
    else:
        cursor = cnx.cursor()

    if database_type == "postgresql":
        query = 'SELECT * FROM "EmailSettings"'
    else:
        query = "SELECT * FROM EmailSettings"

    cursor.execute(query)
    result = cursor.fetchone()
    cursor.close()

    if result:
        if database_type == "postgresql":
            # Normalize keys to PascalCase
            settings_dict = normalize_keys(result, database_type)
        else:
            # For MySQL or MariaDB, convert tuple result to dictionary and keep keys as is
            keys = ["Emailsettingsid", "ServerName", "ServerPort", "FromEmail", "SendMode", "Encryption",
                    "AuthRequired", "Username", "Password"]
            settings_dict = dict(zip(keys, result))

        # Convert AuthRequired to 0 or 1 if database is PostgreSQL
        if database_type == "postgresql":
            settings_dict["AuthRequired"] = 1 if settings_dict["AuthRequired"] else 0

        return settings_dict
    else:
        return None


def get_episode_id(cnx, database_type, podcast_id, episode_title, episode_url):
    """Return the EpisodeID for (podcast, title, url), inserting if missing.

    NOTE(review): the insert path only sets PodcastID/Title/URL; the PostgreSQL
    branch also mutates cnx.row_factory permanently.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()

    if database_type == "postgresql":
        query = 'SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeUrl = %s'
    else:  # MySQL or MariaDB
        query = "SELECT EpisodeID FROM Episodes WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeUrl = %s"

    params = (podcast_id, episode_title, episode_url)

    cursor.execute(query, params)
    result = cursor.fetchone()

    if result:
        episode_id = result['episodeid'] if database_type == "postgresql" else result[0]
    else:
        # Episode not found, insert a new episode into the Episodes table
        if database_type == "postgresql":
            query = 'INSERT INTO "Episodes" (PodcastID, EpisodeTitle, EpisodeUrl) VALUES (%s, %s, %s) RETURNING EpisodeID'
        else:  # MySQL or MariaDB
            query = "INSERT INTO Episodes (PodcastID, EpisodeTitle, EpisodeUrl) VALUES (%s, %s, %s)"

        cursor.execute(query, params)
        if database_type == "postgresql":
            # FIX: was fetchone()['EpisodeID'], which raises KeyError — the
            # unquoted RETURNING column folds to lowercase, and dict_row keys
            # are lowercase (the select path above already reads 'episodeid').
            row = cursor.fetchone()
            episode_id = row['episodeid'] if isinstance(row, dict) else row[0]
        else:
            episode_id = cursor.lastrowid

    cnx.commit()
    cursor.close()

    return episode_id
def get_episode_id_ep_name(cnx, database_type, podcast_title, episode_url):
    """Return the EpisodeID matching (podcast name, episode URL), or None."""
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = '''
            SELECT e.EpisodeID
            FROM "Episodes" e
            JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
            WHERE p.PodcastName = %s AND e.EpisodeURL = %s
        '''
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()
        query = '''
            SELECT e.EpisodeID
            FROM Episodes e
            JOIN Podcasts p ON e.PodcastID = p.PodcastID
            WHERE p.PodcastName = %s AND e.EpisodeURL = %s
        '''

    params = (podcast_title, episode_url)
    cursor.execute(query, params)
    result = cursor.fetchone()

    if result:
        episode_id = result['episodeid'] if database_type == "postgresql" else result[0]
    else:
        episode_id = None
        print(f"No match found for Podcast Name: '{podcast_title}' and Episode URL: '{episode_url}'")

    cursor.close()
    return episode_id


def get_episode_id_by_url(cnx, database_type, episode_url):
    """Return the EpisodeID for a given episode URL, or None.

    Handles both tuple and dict row shapes; errors are printed and None is
    returned.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT EpisodeID FROM "Episodes" WHERE EpisodeURL = %s'
        else:
            query = "SELECT EpisodeID FROM Episodes WHERE EpisodeURL = %s"

        params = (episode_url,)  # Ensure this is a tuple
        cursor.execute(query, params)
        result = cursor.fetchone()

        if result:
            # Handle both tuple and dictionary-like results
            if isinstance(result, dict):
                # Try with both camelCase and lowercase keys
                episode_id = result.get("episodeid") or result.get("EpisodeID")
            else:  # Assume it's a tuple or tuple-like
                episode_id = result[0]

            return episode_id
        return None  # No matching episode found
    except Exception as e:
        print(f"Error in get_episode_id_by_url: {e}")
        return None
    finally:
        cursor.close()


def queue_podcast_entry(cnx, database_type, user_id, episode_title, episode_url):
    """Put an episode at queue position 1 for the user.

    Returns True when the episode exists (inserted or moved to position 1),
    False when no episode matches the title/URL.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()

    # Get the episode ID using the episode title and URL
    if database_type == "postgresql":
        query = 'SELECT EpisodeID, PodcastID FROM "Episodes" WHERE EpisodeTitle = %s AND EpisodeURL = %s'
    else:  # MySQL or MariaDB
        query = "SELECT EpisodeID, PodcastID FROM Episodes WHERE EpisodeTitle = %s AND EpisodeURL = %s"
    cursor.execute(query, (episode_title, episode_url))
    result = cursor.fetchone()

    if result:
        # FIX: was `episode_id, podcast_id = result['EpisodeID'] if postgres
        # else result` — a single dict value cannot unpack into two names, and
        # dict_row keys are lowercase.  Extract both columns explicitly.
        if isinstance(result, dict):
            episode_id, podcast_id = result['episodeid'], result['podcastid']
        else:
            episode_id, podcast_id = result

        # Check if the episode is already in the queue
        if database_type == "postgresql":
            query = 'SELECT COUNT(*) FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s'
        else:  # MySQL or MariaDB
            query = "SELECT COUNT(*) FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s"
        cursor.execute(query, (user_id, episode_id))
        # FIX: fetchone()[0] breaks on dict rows (dict_row); COUNT(*) surfaces
        # as key 'count' on PostgreSQL.
        count_row = cursor.fetchone()
        count = count_row.get('count') if isinstance(count_row, dict) else count_row[0]

        if count > 0:
            # Episode is already in the queue, move it to position 1 and update the QueueDate
            if database_type == "postgresql":
                query = 'UPDATE "EpisodeQueue" SET QueuePosition = 1, QueueDate = CURRENT_TIMESTAMP WHERE UserID = %s AND EpisodeID = %s'
            else:  # MySQL or MariaDB
                query = "UPDATE EpisodeQueue SET QueuePosition = 1, QueueDate = CURRENT_TIMESTAMP WHERE UserID = %s AND EpisodeID = %s"
            cursor.execute(query, (user_id, episode_id))
            cnx.commit()
        else:
            # Episode is not in the queue, insert it at position 1
            if database_type == "postgresql":
                query = 'INSERT INTO "EpisodeQueue" (UserID, EpisodeID, QueuePosition) VALUES (%s, %s, 1)'
            else:  # MySQL or MariaDB
                query = "INSERT INTO EpisodeQueue (UserID, EpisodeID, QueuePosition) VALUES (%s, %s, 1)"
            cursor.execute(query, (user_id, episode_id))
            cnx.commit()

        cursor.close()
        return True
    else:
        # Episode not found in the database
        cursor.close()
        return False


def episode_remove_queue(cnx, database_type, user_id, url, title):
    """Remove an episode (looked up by title+URL) from the user's queue.

    Returns True when the episode exists, False otherwise.
    """
    cursor = cnx.cursor()

    # Get the episode ID using the episode title and URL
    if database_type == "postgresql":
        query = 'SELECT EpisodeID FROM "Episodes" WHERE EpisodeTitle = %s AND EpisodeURL = %s'
    else:
        query = "SELECT EpisodeID FROM Episodes WHERE EpisodeTitle = %s AND EpisodeURL = %s"
    cursor.execute(query, (title, url))
    episode_id = cursor.fetchone()

    if episode_id:
        # Remove the episode from the user's queue
        if database_type == "postgresql":
            query = 'DELETE FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s'
        else:
            query = "DELETE FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s"
        cursor.execute(query, (user_id, episode_id[0]))  # Extract the episode ID from the tuple
        cnx.commit()

        cursor.close()
        return True
    else:
        # Episode not found in the database
        cursor.close()
        return False


def check_usernames(cnx, database_type, username):
    """Return True if a user with this username exists."""
    cursor = cnx.cursor()
    if database_type == 'postgresql':
        query = ('SELECT COUNT(*) FROM "Users" WHERE Username = %s')
    else:
        query = ("SELECT COUNT(*) FROM Users WHERE Username = %s")
    cursor.execute(query, (username,))
    # Same dict/tuple handling as queue_podcast_entry: COUNT(*) is 'count' on
    # PostgreSQL dict rows.
    row = cursor.fetchone()
    count = row.get('count') if isinstance(row, dict) else row[0]
    cursor.close()
    return count > 0


def record_listen_duration(cnx, database_type, episode_id, user_id, listen_duration):
    """Record listen progress, keeping only the furthest position.

    Upserts a UserEpisodeHistory row; an existing larger duration is never
    overwritten.  Negative durations are ignored.  Errors are logged and
    rolled back, not raised.
    """
    if listen_duration < 0:
        logging.info(f"Skipped updating listen duration for user {user_id} and episode {episode_id} due to invalid duration: {listen_duration}")
        return
    listen_date = datetime.datetime.now()
    cursor = cnx.cursor()

    try:
        # Check if UserEpisodeHistory row already exists for the given user and episode
        if database_type == "postgresql":
            cursor.execute('SELECT ListenDuration FROM "UserEpisodeHistory" WHERE UserID=%s AND EpisodeID=%s', (user_id, episode_id))
        else:
            cursor.execute("SELECT ListenDuration FROM UserEpisodeHistory WHERE UserID=%s AND EpisodeID=%s", (user_id, episode_id))
        result = cursor.fetchone()
        if result is not None:
            # FIX: was result.get("ListenDuration") for dict rows — dict_row
            # keys are lowercase, so the existing value always read as None and
            # the "never shrink the recorded duration" guard was defeated.
            if isinstance(result, tuple):
                existing_duration = result[0]
            else:
                existing_duration = result.get("listenduration", result.get("ListenDuration"))
            # Ensure existing_duration is not None
            existing_duration = existing_duration if existing_duration is not None else 0
            # Update only if the new duration is greater than the existing duration
            if listen_duration > existing_duration:
                if database_type == "postgresql":
                    update_listen_duration = 'UPDATE "UserEpisodeHistory" SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND EpisodeID=%s'
                else:
                    update_listen_duration = "UPDATE UserEpisodeHistory SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND EpisodeID=%s"
                cursor.execute(update_listen_duration, (listen_duration, listen_date, user_id, episode_id))
            else:
                print(f"No update required for user {user_id} and episode {episode_id} as existing duration {existing_duration} is greater than or equal to new duration {listen_duration}")
        else:
            # Insert new row
            if database_type == "postgresql":
                add_listen_duration = 'INSERT INTO "UserEpisodeHistory" (UserID, EpisodeID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)'
            else:
                add_listen_duration = "INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)"
            cursor.execute(add_listen_duration, (user_id, episode_id, listen_date, listen_duration))
        cnx.commit()
    except Exception as e:
        logging.error(f"Failed to record listen duration due to: {e}")
        cnx.rollback()
    finally:
        cursor.close()
def record_youtube_listen_duration(cnx, database_type, video_id, user_id, listen_duration):
    """Persist how far a user has listened into a YouTube video.

    Upserts the (user, video) row in UserVideoHistory and mirrors the
    position into YouTubeVideos.ListenPosition. Negative durations are
    rejected up front. Rolls back on any database error.
    """
    if listen_duration < 0:
        logging.info(f"Skipped updating listen duration for user {user_id} and video {video_id} due to invalid duration: {listen_duration}")
        return

    listen_date = datetime.datetime.now()
    pg = database_type == "postgresql"
    cursor = cnx.cursor()
    try:
        # Fetch the currently stored duration, if any.
        if pg:
            cursor.execute('SELECT ListenDuration FROM "UserVideoHistory" WHERE UserID=%s AND VideoID=%s', (user_id, video_id))
        else:
            cursor.execute("SELECT ListenDuration FROM UserVideoHistory WHERE UserID=%s AND VideoID=%s", (user_id, video_id))
        row = cursor.fetchone()

        if row is not None:
            stored = row[0] if isinstance(row, tuple) else row.get("ListenDuration")
            stored = 0 if stored is None else stored

            # Only ever move the position forward, never backwards.
            if listen_duration > stored:
                if pg:
                    cursor.execute('UPDATE "UserVideoHistory" SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND VideoID=%s', (listen_duration, listen_date, user_id, video_id))
                    cursor.execute('UPDATE "YouTubeVideos" SET ListenPosition=%s WHERE VideoID=%s', (listen_duration, video_id))
                else:
                    cursor.execute("UPDATE UserVideoHistory SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND VideoID=%s", (listen_duration, listen_date, user_id, video_id))
                    cursor.execute("UPDATE YouTubeVideos SET ListenPosition=%s WHERE VideoID=%s", (listen_duration, video_id))
        else:
            # First listen: create the history row, then sync the position.
            if pg:
                cursor.execute('INSERT INTO "UserVideoHistory" (UserID, VideoID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)', (user_id, video_id, listen_date, listen_duration))
                cursor.execute('UPDATE "YouTubeVideos" SET ListenPosition=%s WHERE VideoID=%s', (listen_duration, video_id))
            else:
                cursor.execute("INSERT INTO UserVideoHistory (UserID, VideoID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)", (user_id, video_id, listen_date, listen_duration))
                cursor.execute("UPDATE YouTubeVideos SET ListenPosition=%s WHERE VideoID=%s", (listen_duration, video_id))

        cnx.commit()
    except Exception as e:
        logging.error(f"Failed to record YouTube listen duration due to: {e}")
        cnx.rollback()
    finally:
        cursor.close()
def generate_guid(episode_time):
    """Derive a stable GUID for an episode from its podcast and episode URLs."""
    import uuid
    # uuid3 (MD5 over the URL namespace) is deterministic for a given pair.
    combined = episode_time["podcast_url"] + episode_time["episode_url"]
    return str(uuid.uuid3(uuid.NAMESPACE_URL, combined))


def get_playback_speed(cnx, database_type: str, user_id: int, is_youtube: bool, podcast_id: Optional[int] = None) -> float:
    """Return the playback speed for a user, or a per-podcast override.

    Falls back to 1.0 when no row or a NULL speed is found. Note:
    `is_youtube` is currently unused — kept for interface compatibility.
    """
    cursor = cnx.cursor()
    try:
        pg = database_type == "postgresql"
        if podcast_id is None:
            query = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s' if pg else 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s'
            cursor.execute(query, (user_id,))
        else:
            query = 'SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = %s' if pg else 'SELECT PlaybackSpeed FROM Podcasts WHERE PodcastID = %s'
            cursor.execute(query, (podcast_id,))

        row = cursor.fetchone()
        if not row:
            return 1.0
        if isinstance(row, dict):
            # Column-name casing varies by driver; match case-insensitively.
            for key, value in row.items():
                if key.lower() == 'playbackspeed':
                    return value if value is not None else 1.0
            return 1.0
        return row[0] if row[0] is not None else 1.0
    finally:
        cursor.close()


def check_episode_playback(cnx, database_type, user_id, episode_title, episode_url):
    """Return (has_played, listen_duration) for a user's episode.

    Looks the episode up by title/URL within the user's podcasts; yields
    (False, 0) when the episode is unknown or has no listen history.
    """
    pg = database_type == "postgresql"
    if pg:
        cnx.row_factory = dict_row
    cursor = cnx.cursor()
    try:
        if pg:
            lookup = """
                SELECT e.EpisodeID
                FROM "Episodes" e
                JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
                WHERE e.EpisodeTitle = %s AND e.EpisodeURL = %s AND p.UserID = %s
            """
        else:  # MySQL or MariaDB
            lookup = """
                SELECT e.EpisodeID
                FROM Episodes e
                JOIN Podcasts p ON e.PodcastID = p.PodcastID
                WHERE e.EpisodeTitle = %s AND e.EpisodeURL = %s AND p.UserID = %s
            """
        cursor.execute(lookup, (episode_title, episode_url, user_id))
        found = cursor.fetchone()
        if found is None:
            return False, 0
        episode_id = found['EpisodeID'] if pg else found[0]

        if pg:
            history = 'SELECT ListenDuration FROM "UserEpisodeHistory" WHERE UserID = %s AND EpisodeID = %s'
        else:
            history = "SELECT ListenDuration FROM UserEpisodeHistory WHERE UserID = %s AND EpisodeID = %s"
        cursor.execute(history, (user_id, episode_id))
        played = cursor.fetchone()
        if played:
            return True, (played['ListenDuration'] if pg else played[0])
        return False, 0
    except (psycopg.errors.InterfaceError, mysql.connector.errors.InterfaceError):
        # A dropped connection is reported as "never played".
        return False, 0
    finally:
        if cursor:
            cursor.close()
def get_theme(cnx, database_type, user_id):
    """Fetch the UI theme stored in UserSettings for a user."""
    cursor = None
    try:
        cursor = cnx.cursor()
        if database_type == 'postgresql':
            query = 'SELECT Theme FROM "UserSettings" WHERE UserID = %s'
        else:
            query = "SELECT Theme FROM UserSettings WHERE UserID = %s"
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()
        # Drivers may hand back a mapping or a plain tuple.
        return row["theme"] if isinstance(row, dict) else row[0]
    finally:
        if cursor:
            cursor.close()


def set_theme(cnx, database_type, user_id, theme):
    """Store a new UI theme in UserSettings for a user."""
    cursor = None
    try:
        cursor = cnx.cursor()
        if database_type == 'postgresql':
            query = 'UPDATE "UserSettings" SET Theme = %s WHERE UserID = %s'
        else:
            query = "UPDATE UserSettings SET Theme = %s WHERE UserID = %s"
        cursor.execute(query, (theme, user_id))
        cnx.commit()
    finally:
        if cursor:
            cursor.close()
def get_my_user_info(database_type, cnx, user_id):
    """Return a single user's profile as a lowercase-keyed dict, or None.

    None values are normalized to empty strings so the API layer never
    serializes nulls. Errors are logged and reported as None.
    """
    cursor = None  # BUG FIX: guard so `finally` never touches an unbound name
    try:
        if database_type == "postgresql":
            cnx.row_factory = dict_row
            cursor = cnx.cursor()
            query = '''
                SELECT UserID, Fullname, Username, Email,
                    CASE WHEN IsAdmin THEN 1 ELSE 0 END AS IsAdmin
                FROM "Users"
                WHERE UserID = %s
            '''
        else:  # MySQL or MariaDB
            cursor = cnx.cursor(dictionary=True)
            query = """
                SELECT UserID, Fullname, Username, Email, IsAdmin
                FROM Users
                WHERE UserID = %s
            """
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()
        if not row:
            return None

        if isinstance(row, dict):
            # MySQL column names keep their declared casing; normalize.
            if database_type != "postgresql":
                return {k.lower(): v if v is not None else "" for k, v in row.items()}
            return {k: v if v is not None else "" for k, v in row.items()}
        # Tuple fallback: map by the known column order.
        columns = ['userid', 'fullname', 'username', 'email', 'isadmin']
        return {columns[i]: v if v is not None else "" for i, v in enumerate(row)}
    except Exception as e:
        print(f"Error getting user info: {e}")
        return None
    finally:
        if cursor:
            cursor.close()


def get_api_info(database_type, cnx, user_id):
    """List API keys and RSS keys visible to a user.

    Admins see every key; other users only their own. Returns a list of
    dicts with lowercase keys, or [] when nothing matches.
    """
    # First determine whether the caller is an admin.
    cursor = cnx.cursor()
    if database_type == "postgresql":
        is_admin_query = 'SELECT IsAdmin FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        is_admin_query = "SELECT IsAdmin FROM Users WHERE UserID = %s"
    cursor.execute(is_admin_query, (user_id,))
    is_admin_result = cursor.fetchone()
    cursor.close()

    is_admin = is_admin_result[0] if isinstance(is_admin_result, tuple) else is_admin_result["isadmin"] if is_admin_result else 0

    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = (
            'SELECT APIKeyID, "APIKeys".UserID, Username, RIGHT(APIKey, 4) as LastFourDigits, Created, ARRAY[]::integer[] AS PodcastIDs '
            'FROM "APIKeys" '
            'JOIN "Users" ON "APIKeys".UserID = "Users".UserID '
        )
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = (
            "SELECT APIKeyID, APIKeys.UserID, Username, RIGHT(APIKey, 4) as LastFourDigits, Created, '' AS PodcastIDs "
            "FROM APIKeys "
            "JOIN Users ON APIKeys.UserID = Users.UserID "
        )

    # Non-admins only see their own API keys.
    if not is_admin:
        if database_type == 'postgresql':
            query += 'WHERE "APIKeys".UserID = %s'
        else:
            query += "WHERE APIKeys.UserID = %s"

    # RSS keys are reported alongside API keys.
    if database_type == 'postgresql':
        query += '''
            UNION ALL
            SELECT "RssKeys".RssKeyID, "RssKeys".UserID, "Users".Username, RIGHT("RssKeys".RssKey, 4) as LastFourDigits, "RssKeys".Created, ARRAY_AGG("RssKeyMap".PodcastID) as PodcastIDs
            FROM "RssKeys"
            JOIN "Users" ON "RssKeys".UserID = "Users".UserID
            JOIN "RssKeyMap" ON "RssKeys".RssKeyID = "RssKeyMap".RssKeyID
        '''
        # BUG FIX: the user filter must precede GROUP BY; it was previously
        # appended after it, producing invalid SQL for non-admin users.
        if not is_admin:
            query += ' WHERE "RssKeys".UserID = %s'
        query += '''
            GROUP BY "RssKeys".RssKeyID, "RssKeys".UserID, "Users".Username, "RssKeys".RssKey, "RssKeys".Created
        '''
    else:
        query += '''
            UNION ALL
            SELECT RssKeys.RssKeyID, RssKeys.UserID, Users.Username, RIGHT(RssKeys.RssKey, 4) as LastFourDigits, RssKeys.Created, GROUP_CONCAT(CAST(RssKeyMap.PodcastID AS CHAR)) as PodcastIDs
            FROM RssKeys
            JOIN Users ON RssKeys.UserID = Users.UserID
            JOIN RssKeyMap ON RssKeys.RssKeyID = RssKeyMap.RssKeyID
        '''
        if not is_admin:
            query += ' WHERE RssKeys.UserID = %s'
        query += '''
            GROUP BY RssKeys.RssKeyID, RssKeys.UserID, Users.Username, RssKeys.RssKey, RssKeys.Created
        '''

    # One placeholder per UNION arm when filtering by user.
    cursor.execute(query, (user_id, user_id) if not is_admin else ())
    rows = cursor.fetchall()
    cursor.close()

    if not rows:
        return []
    if database_type != "postgresql":
        # Convert column names to lowercase for MySQL
        rows = [{k.lower(): v for k, v in row.items()} for row in rows]
    return rows


def create_api_key(cnx, database_type: str, user_id: int):
    """Generate, store and return a new 64-char alphanumeric API key."""
    import secrets
    import string
    alphabet = string.ascii_letters + string.digits
    api_key = ''.join(secrets.choice(alphabet) for _ in range(64))

    cursor = cnx.cursor()
    try:  # BUG FIX: close the cursor even when the insert fails
        if database_type == "postgresql":
            query = 'INSERT INTO "APIKeys" (UserID, APIKey) VALUES (%s, %s)'
        else:  # MySQL or MariaDB
            query = "INSERT INTO APIKeys (UserID, APIKey) VALUES (%s, %s)"
        cursor.execute(query, (user_id, api_key))
        cnx.commit()
    finally:
        cursor.close()
    return api_key
def set_rss_key_podcasts(cnx, database_type: str, rss_key_id: int, podcast_ids: list[int]):
    """Replace the set of podcasts an RSS key grants access to."""
    cursor = cnx.cursor()
    pg = database_type == "postgresql"
    # Wipe the existing mappings, then write the new set.
    delete_q = 'DELETE FROM "RssKeyMap" WHERE RssKeyID = %s' if pg else 'DELETE FROM RssKeyMap WHERE RssKeyID = %s'
    cursor.execute(delete_q, (rss_key_id,))

    insert_q = 'INSERT INTO "RssKeyMap" (RssKeyID, PodcastID) VALUES (%s, %s)' if pg else 'INSERT INTO RssKeyMap (RssKeyID, PodcastID) VALUES (%s, %s)'
    for pid in podcast_ids:
        cursor.execute(insert_q, (rss_key_id, pid))

    cnx.commit()
    cursor.close()


def get_user_api_key(cnx, database_type, user_id):
    """Return the user's most recently created API key, or None."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT APIKey
                FROM "APIKeys"
                WHERE UserID = %s
                ORDER BY Created DESC
                LIMIT 1
            """
        else:
            query = """
                SELECT APIKey
                FROM APIKeys
                WHERE UserID = %s
                ORDER BY Created DESC
                LIMIT 1
            """
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()
        if row:
            return row[0] if isinstance(row, tuple) else row['apikey']
        return None
    finally:
        cursor.close()
def belongs_to_guest_user(cnx, database_type, api_id):
    """Return True when the given API key belongs to the guest user (UserID 1)."""
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'SELECT UserID FROM "APIKeys" WHERE APIKeyID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT UserID FROM APIKeys WHERE APIKeyID = %s"

    cursor.execute(query, (api_id,))
    result = cursor.fetchone()
    # BUG FIX: cursor.description must be read BEFORE close(); several
    # drivers invalidate the metadata once the cursor is closed.
    if result is not None and isinstance(result, tuple):
        result = dict(zip([desc[0] for desc in cursor.description], result))
    cursor.close()

    if result:
        if database_type == 'postgresql':
            return result.get('userid') == 1
        return result.get('UserID') == 1
    return False


def delete_api(cnx, database_type, api_id):
    """Delete an API key by its ID."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'DELETE FROM "APIKeys" WHERE APIKeyID = %s'
        else:  # MySQL or MariaDB
            query = "DELETE FROM APIKeys WHERE APIKeyID = %s"
        cursor.execute(query, (api_id,))
        cnx.commit()
    finally:
        cursor.close()


def set_username(cnx, database_type, user_id, new_username):
    """Change a user's login name."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Users" SET Username = %s WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE Users SET Username = %s WHERE UserID = %s"
        cursor.execute(query, (new_username, user_id))
        cnx.commit()
    finally:
        cursor.close()


def set_password(cnx, database_type, user_id, hash_pw):
    """Store a new (pre-hashed) password for a user."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Users" SET Hashed_PW = %s WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE Users SET Hashed_PW = %s WHERE UserID = %s"
        cursor.execute(query, (hash_pw, user_id))
        cnx.commit()
    finally:
        cursor.close()
"UPDATE Users SET Email = %s WHERE UserID = %s" - - cursor.execute(query, (new_email, user_id)) - cnx.commit() - cursor.close() - - - -def set_fullname(cnx, database_type, user_id, new_name): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'UPDATE "Users" SET Fullname = %s WHERE UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE Users SET Fullname = %s WHERE UserID = %s" - - cursor.execute(query, (new_name, user_id)) - cnx.commit() - cursor.close() - - - -def set_isadmin(cnx, database_type, user_id, isadmin): - cursor = cnx.cursor() - - if database_type == "postgresql": - query = 'UPDATE "Users" SET IsAdmin = %s WHERE UserID = %s' - # For PostgreSQL, use boolean directly instead of converting to int - cursor.execute(query, (isadmin, user_id)) - else: # MySQL or MariaDB - query = "UPDATE Users SET IsAdmin = %s WHERE UserID = %s" - isadmin_int = int(isadmin) - cursor.execute(query, (isadmin_int, user_id)) - - cnx.commit() - cursor.close() - - - -def delete_user(cnx, database_type, user_id): - cursor = cnx.cursor() - - # Delete user from UserEpisodeHistory table - try: - if database_type == "postgresql": - query = 'DELETE FROM "UserEpisodeHistory" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM UserEpisodeHistory WHERE UserID = %s" - cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from UserEpisodeHistory: {e}") - - # Delete user from DownloadedEpisodes table - try: - if database_type == "postgresql": - query = 'DELETE FROM "DownloadedEpisodes" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM DownloadedEpisodes WHERE UserID = %s" - cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from DownloadedEpisodes: {e}") - - # Delete user from EpisodeQueue table - try: - if database_type == "postgresql": - query = 'DELETE FROM "EpisodeQueue" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM EpisodeQueue WHERE UserID = %s" 
- cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from EpisodeQueue: {e}") - - # Delete user from Podcasts table - try: - if database_type == "postgresql": - query = 'DELETE FROM "Podcasts" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM Podcasts WHERE UserID = %s" - cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from Podcasts: {e}") - - # Delete user from UserSettings table - try: - if database_type == "postgresql": - query = 'DELETE FROM "UserSettings" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM UserSettings WHERE UserID = %s" - cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from UserSettings: {e}") - - # Delete user from UserStats table - try: - if database_type == "postgresql": - query = 'DELETE FROM "UserStats" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM UserStats WHERE UserID = %s" - cursor.execute(query, (user_id,)) - except Exception as e: - print(f"Error deleting from UserStats: {e}") - - # Delete user from Users table - if database_type == "postgresql": - query = 'DELETE FROM "Users" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "DELETE FROM Users WHERE UserID = %s" - cursor.execute(query, (user_id,)) - cnx.commit() - - cursor.close() - - - -def user_admin_check(cnx, database_type, user_id): - - logging.info(f"Checking admin status for user ID: {user_id}, database type: {database_type}") - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT IsAdmin FROM "Users" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "SELECT IsAdmin FROM Users WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - - logging.info(f"Query result: {result}") - - if result is None: - logging.warning(f"No result found for user ID: {user_id}") - return False - - try: - return bool(result[0] if isinstance(result, tuple) 
def final_admin(cnx, database_type, user_id):
    """Return True when `user_id` is the only remaining admin account.

    Used to block deleting or demoting the last admin.
    """
    cursor = cnx.cursor()
    try:  # BUG FIX: the cursor previously leaked on the early `return True`
        if database_type == "postgresql":
            cursor.execute('SELECT COUNT(*) FROM "Users" WHERE IsAdmin = TRUE')
        else:  # MySQL or MariaDB
            cursor.execute("SELECT COUNT(*) FROM Users WHERE IsAdmin = 1")
        result = cursor.fetchone()
        # Handle both tuple and dict results
        admin_count = result[0] if isinstance(result, tuple) else result['count']

        if admin_count == 1:
            if database_type == "postgresql":
                cursor.execute('SELECT IsAdmin FROM "Users" WHERE UserID = %s', (user_id,))
            else:  # MySQL or MariaDB
                cursor.execute("SELECT IsAdmin FROM Users WHERE UserID = %s", (user_id,))
            result = cursor.fetchone()
            is_admin = result[0] if isinstance(result, tuple) else result['isadmin']
            # PostgreSQL boolean or MySQL/MariaDB 0/1 int.
            if is_admin:
                return True
        return False
    finally:
        cursor.close()


def download_status(cnx, database_type):
    """Return True when server-wide episode downloads are enabled."""
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cursor = cnx.cursor(row_factory=dict_row)
        query = 'SELECT DownloadEnabled FROM "AppSettings"'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT DownloadEnabled FROM AppSettings"

    cursor.execute(query)
    result = cursor.fetchone()
    cursor.close()

    if result:
        if isinstance(result, dict):
            # Key casing differs between drivers.
            enabled = result.get('DownloadEnabled') or result.get('downloadenabled')
        else:
            enabled = result[0]
        # Matches both MySQL's 1 and PostgreSQL's True (True == 1 in Python).
        if enabled == 1:
            return True
    return False
def enable_disable_guest(cnx, database_type):
    """Toggle the guest account by flipping its Email between 'active'/'inactive'."""
    cursor = cnx.cursor()
    if database_type == "postgresql":
        toggle = 'UPDATE "Users" SET Email = CASE WHEN Email = \'inactive\' THEN \'active\' ELSE \'inactive\' END WHERE Username = \'guest\''
    else:  # MySQL or MariaDB
        toggle = "UPDATE Users SET Email = CASE WHEN Email = 'inactive' THEN 'active' ELSE 'inactive' END WHERE Username = 'guest'"
    cursor.execute(toggle)
    cnx.commit()
    cursor.close()


def enable_disable_downloads(cnx, database_type):
    """Toggle the global DownloadEnabled flag in AppSettings."""
    cursor = cnx.cursor()
    if database_type == "postgresql":
        toggle = 'UPDATE "AppSettings" SET DownloadEnabled = CASE WHEN DownloadEnabled = true THEN false ELSE true END'
    else:  # MySQL or MariaDB
        toggle = "UPDATE AppSettings SET DownloadEnabled = CASE WHEN DownloadEnabled = 1 THEN 0 ELSE 1 END"
    cursor.execute(toggle)
    cnx.commit()
    cursor.close()


def check_admin_exists(cnx, database_type):
    """Count admin users, ignoring the internal background_tasks account."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            cursor.execute("""
                SELECT COUNT(*) as count FROM "Users"
                WHERE IsAdmin = TRUE
                AND Username != 'background_tasks'
            """)
        else:  # MySQL or MariaDB
            cursor.execute("""
                SELECT COUNT(*) FROM Users
                WHERE IsAdmin = 1
                AND Username != 'background_tasks'
            """)
        row = cursor.fetchone()
        if not row:
            return 0
        return row['count'] if isinstance(row, dict) else row[0]
    finally:
        cursor.close()
def enable_disable_self_service(cnx, database_type):
    """Toggle the SelfServiceUser flag in AppSettings."""
    cursor = cnx.cursor()
    if database_type == "postgresql":
        toggle = 'UPDATE "AppSettings" SET SelfServiceUser = CASE WHEN SelfServiceUser = true THEN false ELSE true END'
    else:  # MySQL or MariaDB
        toggle = "UPDATE AppSettings SET SelfServiceUser = CASE WHEN SelfServiceUser = 1 THEN 0 ELSE 1 END"
    cursor.execute(toggle)
    cnx.commit()
    cursor.close()


def verify_api_key(cnx, database_type, passed_key):
    """Return True when `passed_key` matches a stored API key.

    Any database error is logged and reported as a failed verification.
    """
    cursor = cnx.cursor()
    lookup = 'SELECT * FROM "APIKeys" WHERE APIKey = %s' if database_type == "postgresql" else "SELECT * FROM APIKeys WHERE APIKey = %s"
    try:
        cursor.execute(lookup, (passed_key,))
        row = cursor.fetchone()
        return True if row else False
    except Exception as e:
        logging.error(f'verify_api_key error: {str(e)}')
        return False
    finally:
        cursor.close()
def update_user_gpodder_sync(cnx, database_type, user_id, new_sync_type):
    """Set a user's Pod_Sync_Type and verify the write.

    Returns True when at least one row was updated, False on no-op or error.
    """
    cursor = cnx.cursor()
    try:
        print(f"Updating sync type for user_id {user_id} to {new_sync_type}")
        # BUG FIX: the update/verify statements were PostgreSQL-only
        # (quoted "Users"); MySQL/MariaDB need unquoted identifiers,
        # matching every sibling function in this module.
        if database_type == "postgresql":
            update_query = 'UPDATE "Users" SET Pod_Sync_Type = %s WHERE UserID = %s'
            verify_query = 'SELECT Pod_Sync_Type FROM "Users" WHERE UserID = %s'
        else:  # MySQL or MariaDB
            update_query = "UPDATE Users SET Pod_Sync_Type = %s WHERE UserID = %s"
            verify_query = "SELECT Pod_Sync_Type FROM Users WHERE UserID = %s"

        cursor.execute(update_query, (new_sync_type, user_id))
        rows_affected = cursor.rowcount
        print(f"Rows affected by update: {rows_affected}")
        cnx.commit()
        print("Transaction committed")

        # Read the value back so failures surface in the logs.
        verify_cursor = cnx.cursor()
        verify_cursor.execute(verify_query, (user_id,))
        updated_value = verify_cursor.fetchone()
        verify_cursor.close()
        print(f"Verification after update: {updated_value}")

        return rows_affected > 0
    except Exception as e:
        print(f"Database error in update_user_gpodder_sync: {e}")
        return False
    finally:
        cursor.close()
cursor.execute("SELECT EnableRSSFeeds FROM Users WHERE UserID = %s", (user_id,)) - - result = cursor.fetchone() - logging.info(f"RSS feed status raw result: {result}") - - value = get_value_from_result(result, 'enablerssfeeds', False) - logging.info(f"RSS feed status processed value: {value}") - - return bool(value) - except Exception as e: - logging.error(f"Error checking RSS feed status: {e}") - return False - finally: - cursor.close() - - -def toggle_rss_feeds(cnx, database_type: str, user_id: int) -> bool: - cursor = cnx.cursor() - try: - # Get current status - if database_type == "postgresql": - cursor.execute('SELECT EnableRSSFeeds FROM "Users" WHERE UserID = %s', (user_id,)) - else: - cursor.execute("SELECT EnableRSSFeeds FROM Users WHERE UserID = %s", (user_id,)) - - current_status = cursor.fetchone() - - # Handle different return types from psycopg - if current_status is None: - # User not found, default to enabling RSS feeds - new_status = True - elif isinstance(current_status, dict): - # Dictionary format (with dict_row) - current_value = current_status.get('enablerssfeeds') or current_status.get('EnableRSSFeeds') - new_status = not bool(current_value) if current_value is not None else True - elif isinstance(current_status, (tuple, list)): - # Tuple format (default psycopg behavior) - current_value = current_status[0] if current_status else None - new_status = not bool(current_value) if current_value is not None else True - else: - # Fallback - assume enabling - new_status = True - - # Update status - if database_type == "postgresql": - cursor.execute( - 'UPDATE "Users" SET EnableRSSFeeds = %s WHERE UserID = %s', - (new_status, user_id) - ) - else: - cursor.execute( - "UPDATE Users Set EnableRSSFeeds = %s WHERE UserID = %s", - (new_status, user_id) - ) - cnx.commit() - - # If enabling RSS feeds, create an RSS key if one doesn't exist - if new_status: - # Check if user already has an RSS key - if database_type == "postgresql": - cursor.execute('SELECT 
RssKeyID FROM "RssKeys" WHERE UserID = %s', (user_id,)) - else: - cursor.execute("SELECT RssKeyID FROM RssKeys WHERE UserID = %s", (user_id,)) - - existing_key = cursor.fetchone() - # Check if RSS key exists - handle both tuple and dict returns - has_existing_key = False - if existing_key: - if isinstance(existing_key, dict): - has_existing_key = bool(existing_key.get('rsskeyid') or existing_key.get('RssKeyID')) - elif isinstance(existing_key, (tuple, list)): - has_existing_key = bool(existing_key[0]) - - if not has_existing_key: - try: - # Create RSS key for all podcasts (-1 means all) - create_rss_key(cnx, database_type, user_id, [-1]) - logging.info(f"Created RSS key for user {user_id}") - except Exception as rss_error: - logging.error(f"Failed to create RSS key for user {user_id}: {rss_error}") - raise Exception(f"Failed to create RSS key: {str(rss_error)}") - - return new_status - except Exception as e: - logging.error(f"Error in toggle_rss_feeds for user {user_id}: {e}") - raise - finally: - cursor.close() - - -def get_user_rss_key(cnx, database_type: str, user_id: int) -> str: - """Get the RSS key for a user""" - cursor = cnx.cursor() - try: - if database_type == "postgresql": - cursor.execute('SELECT RssKey FROM "RssKeys" WHERE UserID = %s', (user_id,)) - else: - cursor.execute("SELECT RssKey FROM RssKeys WHERE UserID = %s", (user_id,)) - - result = cursor.fetchone() - if result: - return result[0] if isinstance(result, tuple) else result['rsskey'] - return None - finally: - cursor.close() - - -def parse_date_safely(date_str): - """Safely parse a date string into a datetime object""" - if isinstance(date_str, dt): - return date_str if date_str.tzinfo else date_str.replace(tzinfo=timezone.utc) - - try: - # PostgreSQL timestamp format - dt_obj = dt.strptime(date_str, '%Y-%m-%d %H:%M:%S') - return dt_obj.replace(tzinfo=timezone.utc) - except (ValueError, TypeError): - try: - # Try with microseconds - dt_obj = dt.strptime(date_str, '%Y-%m-%d %H:%M:%S.%f') - 
return dt_obj.replace(tzinfo=timezone.utc) - except (ValueError, TypeError): - try: - # ISO format - dt_obj = dt.fromisoformat(date_str.replace('Z', '+00:00')) - return dt_obj if dt_obj.tzinfo else dt_obj.replace(tzinfo=timezone.utc) - except (ValueError, TypeError): - # Default to current time if all parsing fails - return dt.now(timezone.utc) - - -def get_value_from_rss_result(result, key_name: str, default=None): - """Helper function to safely extract values from psycopg results""" - if result is None: - return default - - # Handle dictionary result - if isinstance(result, dict): - # Try different case variations for PostgreSQL - return result.get(key_name.lower()) or result.get(key_name.upper()) or default - - # Handle tuple result - if isinstance(result, (tuple, list)) and len(result) > 0: - return result[0] if result[0] is not None else default - - return default - -# Define the custom feed class at module level -class PodcastFeed(feedgenerator.Rss201rev2Feed): - def root_attributes(self): - attrs = super().root_attributes() - attrs['xmlns:itunes'] = 'http://www.itunes.com/dtds/podcast-1.0.dtd' - return attrs - - def add_root_elements(self, handler): - super().add_root_elements(handler) - # Access podcast_image and podcast_name through instance variables - if hasattr(self, 'podcast_image') and self.podcast_image: - handler.addQuickElement('itunes:image', - attrs={'href': self.podcast_image}) - handler.startElement('image', {}) - handler.addQuickElement('url', self.podcast_image) - handler.addQuickElement('title', self.podcast_name) - handler.addQuickElement('link', 'https://github.com/madeofpendletonwool/pinepods') - handler.endElement('image') - - def add_item_elements(self, handler, item): - super().add_item_elements(handler, item) - if 'artwork_url' in item: - handler.addQuickElement('itunes:image', - attrs={'href': item['artwork_url']}) - - -def generate_podcast_rss(database_type: str, cnx, rss_key: dict, limit: int, source_type: str, domain: str, 
podcast_id: Optional[List[int]] = None) -> str: - from datetime import datetime as dt, timezone - cursor = cnx.cursor() - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - user_id = rss_key.get('user_id') - podcast_ids = rss_key.get('podcast_ids') - key = rss_key.get('key') - - # If podcast_id parameter is provided, use it; otherwise use RSS key podcast_ids - print(f'DEBUG: podcast_id param: {podcast_id}, type: {type(podcast_id)}') - print(f'DEBUG: rss_key podcast_ids: {podcast_ids}, type: {type(podcast_ids)}') - - explicit_podcast_filter = False - if podcast_id and len(podcast_id) > 0: - podcast_ids = podcast_id - explicit_podcast_filter = True - print(f'DEBUG: Using explicit podcast filter, podcast_ids set to: {podcast_ids}') - - podcast_filter = explicit_podcast_filter or (podcast_ids and len(podcast_ids) > 0 and -1 not in podcast_ids) - print(f'DEBUG: podcast_filter: {podcast_filter}, explicit_podcast_filter: {explicit_podcast_filter}') - try: - # Check if RSS feeds are enabled for user - if not get_rss_feed_status(cnx, database_type, user_id): - raise HTTPException(status_code=403, detail="RSS feeds not enabled for this user") - - # Get user info for feed metadata - if database_type == "postgresql": - cursor.execute('SELECT username FROM "Users" WHERE userid = %s', (user_id,)) - else: - cursor.execute("SELECT Username FROM Users WHERE UserID = %s", (user_id,)) - - user = cursor.fetchone() - if not user: - raise HTTPException(status_code=404, detail="User not found") - - username = get_value_from_rss_result(user, 'username', 'Unknown User') - - if not source_type or source_type != "youtube": - # Build the query with correct case for each database type - if database_type == "postgresql": - base_query = ''' - SELECT - e.episodeid, - e.podcastid, - e.episodetitle, - e.episodedescription, - CASE WHEN de.episodeid IS NULL - THEN e.episodeurl - ELSE CONCAT(CAST(%s AS TEXT), '/api/data/stream/', e.episodeid, '?api_key=', CAST(%s AS 
TEXT), '&user_id=', pp.userid) - END as episodeurl, - e.episodeartwork, - e.episodepubdate, - e.episodeduration, - pp.podcastname, - pp.author, - pp.artworkurl, - pp.description as podcastdescription - FROM "Episodes" e - JOIN "Podcasts" pp ON e.podcastid = pp.podcastid - LEFT JOIN "DownloadedEpisodes" de ON e.episodeid = de.episodeid - WHERE pp.userid = %s - ''' - else: - base_query = ''' - SELECT - e.EpisodeID, - e.PodcastID, - e.EpisodeTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, - e.EpisodeDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, - CASE WHEN de.EpisodeID IS NULL - THEN e.EpisodeURL COLLATE utf8mb4_unicode_ci - ELSE CONCAT(CAST(%s AS CHAR), '/api/data/stream/', CAST(e.EpisodeID AS CHAR), '?api_key=', CAST(%s AS CHAR), '&user_id=', pp.UserID) - END COLLATE utf8mb4_unicode_ci as EpisodeURL, - e.EpisodeArtwork COLLATE utf8mb4_unicode_ci as EpisodeArtwork, - e.EpisodePubDate, - e.EpisodeDuration, - pp.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, - pp.Author COLLATE utf8mb4_unicode_ci as Author, - pp.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, - pp.Description COLLATE utf8mb4_unicode_ci as PodcastDescription - FROM Episodes e - JOIN Podcasts pp ON e.PodcastID = pp.PodcastID - LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID - WHERE pp.UserID = %s - ''' - - params = [domain, key, user_id] - if podcast_filter: - if database_type == "postgresql": - base_query += ' AND pp.podcastid = ANY(%s)' - params.append(podcast_ids) - else: - placeholders = ','.join(['%s'] * len(podcast_ids)) - base_query += f' AND pp.PodcastID IN ({placeholders})' - params.extend(podcast_ids) - - # For MySQL, only add YouTube union if we actually need it to avoid collation issues - add_youtube_union = not source_type or source_type == "youtube" - if database_type != "postgresql": - # For MySQL, check if any of the filtered podcasts are actually YouTube channels - if podcast_filter and add_youtube_union: - cursor_temp = cnx.cursor() - 
placeholders = ','.join(['%s'] * len(podcast_ids)) - cursor_temp.execute(f"SELECT COUNT(*) FROM Podcasts WHERE PodcastID IN ({placeholders}) AND IsYouTubeChannel = 1", podcast_ids) - youtube_count = cursor_temp.fetchone()[0] - cursor_temp.close() - add_youtube_union = youtube_count > 0 - - if add_youtube_union: - if base_query: - base_query += "\nUNION ALL\n" - - if database_type == "postgresql": - base_query += ''' - SELECT - y.videoid as episodeid, - y.podcastid, - y.videotitle as episodetitle, - y.videodescription as episodetitle, - CONCAT(CAST(%s AS TEXT), '/api/data/stream/', CAST(y.videoid AS TEXT), '?api_key=', CAST(%s AS TEXT), '&type=youtube&user_id=', pv.userid) as episodeurl, - y.thumbnailurl as episodeartwork, - y.publishedat as episodepubdate, - y.duration as episodeduration, - pv.podcastname, - pv.author, - pv.artworkurl, - pv.description as podcastdescription - FROM "YouTubeVideos" y - JOIN "Podcasts" pv on y.podcastid = pv.podcastid - WHERE pv.userid = %s - ''' - else: - base_query += ''' - SELECT - y.VideoID as EpisodeID, - y.PodcastID as PodcastID, - y.VideoTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, - y.VideoDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, - CONCAT(CAST(%s AS CHAR), '/api/data/stream/', CAST(y.VideoID AS CHAR), '?api_key=', CAST(%s AS CHAR), '&type=youtube&user_id=', pv.UserID) COLLATE utf8mb4_unicode_ci as EpisodeURL, - y.ThumbnailURL COLLATE utf8mb4_unicode_ci as EpisodeArtwork, - y.PublishedAt as EpisodePubDate, - y.Duration as EpisodeDuration, - pv.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, - pv.Author COLLATE utf8mb4_unicode_ci as Author, - pv.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, - pv.Description COLLATE utf8mb4_unicode_ci as PodcastDescription - FROM YouTubeVideos y - JOIN Podcasts pv on y.PodcastID = pv.PodcastID - WHERE pv.UserID = %s - ''' - params += [domain, key, user_id] - - if podcast_filter: - if database_type == "postgresql": - base_query += ' AND y.podcastid = 
ANY(%s)' - params.append(podcast_ids) - else: - placeholders = ','.join(['%s'] * len(podcast_ids)) - base_query += f' AND y.PodcastID IN ({placeholders})' - params.extend(podcast_ids) - - base_query += f' ORDER BY 7 DESC' - # Only apply limit if no specific podcast is requested - if not explicit_podcast_filter: - base_query += ' LIMIT %s' - params.append(limit) - cursor.execute(base_query, params) - print('q1') - # Get column names and create result mapping - columns = [desc[0].lower() for desc in cursor.description] - column_map = {name: idx for idx, name in enumerate(columns)} - # Inside generate_podcast_rss, replace the dictionary creation section with: - - episodes = [] - all_rows = cursor.fetchall() - - for row_idx, row in enumerate(all_rows): - try: - episode_dict = {} - - # If row is already a dictionary, use it directly - if isinstance(row, dict): - source_dict = row - else: - # Convert tuple to dictionary using column names - source_dict = dict(zip(columns, row)) - - # Process each column - for col in columns: - try: - - # Get value either from dictionary or by index - if isinstance(row, dict): - raw_value = row.get(col) - else: - col_idx = column_map[col] - raw_value = row[col_idx] if col_idx < len(row) else None - - # Special handling for dates - if col == 'episodepubdate' and raw_value is not None: - try: - if isinstance(raw_value, dt): - value = raw_value if raw_value.tzinfo else raw_value.replace(tzinfo=timezone.utc) - else: - value = dt.strptime(str(raw_value), '%Y-%m-%d %H:%M:%S') - value = value.replace(tzinfo=timezone.utc) - except Exception as e: - logger.error(f"Date parsing failed: {str(e)}") - value = dt.now(timezone.utc) - else: - value = raw_value if raw_value is not None else '' - - episode_dict[col] = value - - except Exception as e: - logger.error(f"Error processing column {col}: {str(e)}", exc_info=True) - # Use safe defaults - if col == 'episodepubdate': - episode_dict[col] = dt.now(timezone.utc) - else: - episode_dict[col] = '' - - 
episodes.append(episode_dict) - - except Exception as e: - logger.error(f"Error processing row {row_idx}: {str(e)}", exc_info=True) - continue - - logger.info(f"Successfully processed {len(episodes)} episodes") - - # Get podcast name if podcast_id is provided - podcast_name = "All Podcasts" - feed_image = "/var/www/html/static/assets/favicon.png" # Default to Pinepods logo - - # Get podcast details when filtering by specific podcast(s) - if podcast_filter: - try: - if database_type == "postgresql": - cursor.execute( - 'SELECT podcastname, artworkurl, description FROM "Podcasts" WHERE podcastid = ANY(%s)', - (podcast_ids,) - ) - else: - # For single podcast ID, use direct equals instead of IN - if len(podcast_ids) == 1: - cursor.execute( - "SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID = %s", - (podcast_ids[0],) - ) - else: - placeholders = ','.join(['%s'] * len(podcast_ids)) - cursor.execute( - f"SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID IN ({placeholders})", - tuple(podcast_ids) - ) - result = cursor.fetchone() - if result: - if isinstance(result, tuple): - podcast_name = result[0] or "Unknown Podcast" - feed_image = result[1] or feed_image - podcast_description = result[2] or "No description available" - else: - podcast_name = result.get('podcastname') or result.get('PodcastName') or "Unknown Podcast" - feed_image = result.get('artworkurl') or result.get('ArtworkURL') or feed_image - podcast_description = result.get('description') or result.get('Description') or "No description available" - else: - podcast_name = "Unknown Podcast" - podcast_description = "No description available" - except Exception as e: - logger.error(f"Error fetching podcast details: {str(e)}") - podcast_name = "Unknown Podcast" - podcast_description = "No description available" - - # Set appropriate description based on whether we're filtering by specific podcast - if podcast_filter and 'podcast_description' in locals(): - 
feed_description = podcast_description - else: - feed_description = f"RSS feed for {'all' if not podcast_filter else 'selected'} podcasts from Pinepods" - - # Initialize feed with custom class - feed = PodcastFeed( - title=f"Pinepods - {podcast_name}", - link="https://github.com/madeofpendletonwool/pinepods", - description=feed_description, - language="en", - author_name=username, - feed_url="", - ttl="60" - ) - - # Set feed image - use podcast artwork for specific podcast, Pinepods logo for all podcasts - feed.podcast_image = feed_image - feed.podcast_name = podcast_name - - # Set podcast image if available - if episodes: - feed.podcast_image = episodes[0].get('artworkurl') - feed.podcast_name = podcast_name - - # Debug logging for image URLs - logger.info(f"Podcast artwork URL: {episodes[0].get('artworkurl') if episodes else 'None'}") - - # Add items to feed - for episode in episodes: - try: - episode_image = episode.get('episodeartwork') or episode.get('artworkurl', '') - # Ensure URLs don't have double-encoded ampersands - episode_url = str(episode.get('episodeurl', '')).replace('&', '&') - - feed.add_item( - title=str(episode.get('episodetitle', 'Untitled Episode')), - link=episode_url, - description=str(episode.get('episodedescription', '')), - unique_id=str(episode.get('episodeid', '')), - enclosure=feedgenerator.Enclosure( - url=episode_url, - length=str(episode.get('episodeduration', '0')), - mime_type='audio/mpeg' - ), - pubdate=episode.get('episodepubdate', dt.now(timezone.utc)), - author=str(episode.get('author', '')), - artwork_url=episode_image - ) - except Exception as e: - logger.error(f"Error adding episode to feed: {str(e)}") - continue - - # Generate RSS and fix URL encoding - rss_content = feed.writeString('utf-8') - # Fix XML-escaped ampersands in URLs to ensure they work properly - rss_content = rss_content.replace('&user_id=', '&user_id=') - rss_content = rss_content.replace('&api_key=', '&api_key=') - rss_content = 
rss_content.replace('&type=', '&type=') - # Fix HTML entities that should be actual HTML tags in descriptions - rss_content = rss_content.replace('<', '<') - rss_content = rss_content.replace('>', '>') - rss_content = rss_content.replace('"', '"') - rss_content = rss_content.replace(''', "'") - # Note: Keep & as-is since it's a valid XML entity - return rss_content - - except Exception as e: - logger.error(f"Error generating RSS feed: {str(e)}", exc_info=True) - raise HTTPException(status_code=500, detail=f"Error generating RSS feed: {str(e)}") - finally: - cursor.close() - - -def set_rss_feed_status(cnx, database_type: str, user_id: int, enable: bool) -> bool: - cursor = cnx.cursor() - try: - if database_type == "postgresql": - cursor.execute( - 'UPDATE "Users" SET EnableRSSFeeds = %s WHERE UserID = %s', - (enable, user_id) - ) - else: - cursor.execute( - "UPDATE Users SET EnableRSSFeeds = %s WHERE UserID = %s", - (enable, user_id) - ) - cnx.commit() - return enable - finally: - cursor.close() - - -def get_api_key(cnx, database_type, username): - try: - cursor = cnx.cursor() - # Get the UserID - if database_type == "postgresql": - query = 'SELECT UserID FROM "Users" WHERE username = %s' - else: # MySQL or MariaDB - query = "SELECT UserID FROM Users WHERE username = %s" - cursor.execute(query, (username,)) - result = cursor.fetchone() - - # Check if a result is returned. 
If not, return None - if result is None: - print("No user found with the provided username.") - cursor.close() - return None - user_id = result[0] if isinstance(result, tuple) else result["userid"] - - # Check the type of the result and access the is_admin value accordingly - # is_admin = is_admin_result[0] if isinstance(is_admin_result, tuple) else is_admin_result["IsAdmin"] if is_admin_result else 0 - - - # Get the API Key using the fetched UserID, and limit the results to 1 - if database_type == "postgresql": - query = 'SELECT APIKey FROM "APIKeys" WHERE UserID = %s LIMIT 1' - else: # MySQL or MariaDB - query = "SELECT APIKey FROM APIKeys WHERE UserID = %s LIMIT 1" - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - cursor.close() - - # Check and return the API key or create a new one if not found - if result: - api_key = result[0] if isinstance(result, tuple) else result["apikey"] - print(f"Result: {api_key}") - return api_key # Adjust the index if the API key is in a different column - else: - print("No API key found for the provided user. 
Creating a new one...") - return create_api_key(cnx, database_type, user_id) - - except Exception as e: - print(f"An error occurred: {str(e)}") - return f"An error occurred: {str(e)}" - - -def get_api_user(cnx, database_type, api_key): - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT UserID FROM "APIKeys" WHERE APIKey = %s LIMIT 1' - else: # MySQL or MariaDB - query = "SELECT UserID FROM APIKeys WHERE APIKey = %s LIMIT 1" - - cursor.execute(query, (api_key,)) - result = cursor.fetchone() - - cursor.close() - - if result: - user_id = result[0] if isinstance(result, tuple) else result['userid'] - print(f"Result: {user_id}") - return user_id # Adjust the index if the API key is in a different column - else: - print(f"ApiKey Not Found") - return "ApiKey Not Found" - - except Exception as e: - print(f"An error occurred: {str(e)}") - return f"An error occurred: {str(e)}" - -def get_value_from_result(result, key_name: str, default=None): - """Helper function to safely extract values from psycopg results""" - if result is None: - return default - - # Handle dictionary result - if isinstance(result, dict): - # Try different case variations for PostgreSQL - return result.get(key_name.lower()) or result.get(key_name.upper()) or default - - # Handle tuple result - if isinstance(result, (tuple, list)): - # For tuples, we assume the first element is what we want - return result[0] if result[0] is not None else default - - return default - - -def id_from_api_key(cnx, database_type: str, passed_key: str, rss_feed: bool = False): - cursor = cnx.cursor() - try: - params = [passed_key] - if database_type == "postgresql": - query = 'SELECT userid FROM "APIKeys" WHERE apikey = %s' - else: - query = "SELECT UserID FROM APIKeys WHERE APIKey = %s" - - cursor.execute(query, tuple(params)) - result = cursor.fetchone() - if result is None: - logging.error("No result found for API key") - return None - - try: - user_id = get_value_from_result(result, 
'userid') - return user_id - except Exception as e: - logging.error(f"Error extracting user_id from result: {e}") - # If we failed to get from dict, try tuple - if isinstance(result, tuple) and len(result) > 0: - return result[0] - raise - - except Exception as e: - logging.error(f"Error in id_from_api_key: {e}") - return None - finally: - cursor.close() - -def get_rss_key_if_valid(cnx, database_type: str, passed_key: str, podcast_ids: Optional[List[int]] = None): - filter_podcast_ids = (podcast_ids and len(podcast_ids) > 0 and -1 not in podcast_ids) - cursor = cnx.cursor() - try: - params = [passed_key] - if database_type == "postgresql": - query = ''' - SELECT fk.userid, STRING_AGG(CAST(fkm.podcastid AS TEXT), ',') as podcastids - FROM "RssKeys" fk - LEFT JOIN "RssKeyMap" fkm ON fk.rsskeyid = fkm.rsskeyid - WHERE fk.rsskey = %s - GROUP BY fk.userid - ''' - else: - query = ''' - SELECT fk.UserID, GROUP_CONCAT(fkm.PodcastID) as podcastids - FROM RssKeys fk - LEFT JOIN RssKeyMap fkm ON fk.RssKeyID = fkm.RssKeyID - WHERE fk.RssKey = %s - GROUP BY fk.UserID - ''' - - cursor.execute(query, tuple(params)) - result = cursor.fetchone() - - if result is None: - logging.error("No result found for Feed Key") - return None - - try: - user_id = get_value_from_result(result, 'userid') - key_podcast_ids = get_value_from_result(result, 'podcastids') - logging.info(f"Successfully extracted user_id: {user_id} and podcast_ids: {key_podcast_ids}") - - # Convert podcast_ids string to list of integers - podcast_ids_list = [] - if key_podcast_ids: - podcast_ids_list = [int(pid) for pid in key_podcast_ids.split(',')] - - if filter_podcast_ids: - if not podcast_ids_list or len(podcast_ids_list) == 0 or -1 in podcast_ids_list: - podcast_ids_list = podcast_ids - else: - podcast_ids_list = [pid for pid in podcast_ids_list if pid in podcast_ids] - - return { - 'user_id': user_id, - 'podcast_ids': podcast_ids_list, - 'key': passed_key - } - except Exception as e: - logging.error(f"Error 
extracting data from result: {e}") - # If we failed to get from dict, try tuple - if isinstance(result, tuple) and len(result) > 0: - user_id = result[0] - key_podcast_ids = result[1] if len(result) > 1 else None - podcast_ids_list = [] - if key_podcast_ids: - podcast_ids_list = [int(pid) for pid in key_podcast_ids.split(',')] - if filter_podcast_ids: - if not podcast_ids_list or len(podcast_ids_list) == 0 or -1 in podcast_ids_list: - podcast_ids_list = podcast_ids - else: - podcast_ids_list = [pid for pid in podcast_ids_list if pid in podcast_ids] - return { - 'user_id': user_id, - 'podcast_ids': podcast_ids_list, - 'key': passed_key - } - raise - - except Exception as e: - logging.error(f"Error in podcasts_from_rss_key: {e}") - return None - finally: - cursor.close() - -def validate_episode_access(cnx, database_type: str, episode_id: int, podcast_ids: Optional[List[int]] = []): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = ''' - SELECT COUNT(*) - FROM "Podcasts" - JOIN "Episodes" ON "Podcasts".PodcastID = "Episodes".PodcastID - JOIN "YouTubeVideos" ON "Podcasts".PodcastID = "YouTubeVideos".PodcastID - WHERE ("Episodes".EpisodeID = %s OR "YouTubeVideos".VideoID = %s) - AND ("Podcasts".PodcastID IN(%s)) - ''' - else: - query = ''' - SELECT COUNT(*) - FROM "Podcasts" - JOIN "Episodes" ON "Podcasts".PodcastID = "Episodes".PodcastID - JOIN "YouTubeVideos" ON "Podcasts".PodcastID = "YouTubeVideos".PodcastID - WHERE ("Episodes".EpisodeID = %s OR "YouTubeVideos".VideoID = %s) - AND ("Podcasts".PodcastID IN(%s)) - ''' - cursor.execute(query, (episode_id, episode_id, podcast_ids)) - result = cursor.fetchone() - return result[0] > 0 - except Exception as e: - logging.error(f"Error in validate_episode_access: {e}") - return False - finally: - cursor.close() - -# def check_api_permission(cnx, passed_key): -# import tempfile -# # Create a temporary file to store the content. This is because the mysql command reads from a file. 
-# with tempfile.NamedTemporaryFile(mode='w+', delete=True) as tempf: -# tempf.write(server_restore_data) -# tempf.flush() -# cmd = [ -# "mysql", -# "-h", 'db', -# "-P", '3306', -# "-u", "root", -# "-p" + database_pass, -# "pypods_database" -# ] - -# # Use the file's content as input for the mysql command -# with open(tempf.name, 'r') as file: -# process = subprocess.Popen(cmd, stdin=file, stdout=subprocess.PIPE, stderr=subprocess.PIPE) -# stdout, stderr = process.communicate() - -# if process.returncode != 0: -# raise Exception(f"Restoration failed with error: {stderr.decode()}") - -# return "Restoration completed successfully!" - - -def get_stats(cnx, database_type, user_id): - logging.info(f"Fetching stats for user ID: {user_id}, database type: {database_type}") - cursor = cnx.cursor() - - # First get the user stats - if database_type == "postgresql": - query = 'SELECT UserCreated, PodcastsPlayed, TimeListened, PodcastsAdded, EpisodesSaved, EpisodesDownloaded FROM "UserStats" WHERE UserID = %s' - else: # MySQL or MariaDB - query = "SELECT UserCreated, PodcastsPlayed, TimeListened, PodcastsAdded, EpisodesSaved, EpisodesDownloaded FROM UserStats WHERE UserID = %s" - - print('getting stats') - cursor.execute(query, (user_id,)) - stats_results = cursor.fetchall() - print(f'stats {stats_results}') - logging.info(f"Stats query results: {stats_results}") - - if not stats_results: - logging.warning(f"No stats found for user ID: {user_id}") - return None - - stats_result = stats_results[0] - - # Now get ONLY GpodderUrl and Pod_Sync_Type from Users table - if database_type == "postgresql": - gpodder_query = 'SELECT GpodderUrl, Pod_Sync_Type FROM "Users" WHERE UserID = %s' - else: # MySQL or MariaDB - gpodder_query = "SELECT GpodderUrl, Pod_Sync_Type FROM Users WHERE UserID = %s" - - cursor.execute(gpodder_query, (user_id,)) - gpodder_results = cursor.fetchone() - cursor.close() - - logging.info(f"GPodder query results: {gpodder_results}") - - # Check if stats_result is a 
dictionary or a tuple and create stats accordingly - if isinstance(stats_result, dict): - if database_type == 'postgresql': - stats = { - "UserCreated": stats_result['usercreated'], - "PodcastsPlayed": stats_result['podcastsplayed'], - "TimeListened": stats_result['timelistened'], - "PodcastsAdded": stats_result['podcastsadded'], - "EpisodesSaved": stats_result['episodessaved'], - "EpisodesDownloaded": stats_result['episodesdownloaded'] - } - else: - stats = { - "UserCreated": stats_result['UserCreated'], - "PodcastsPlayed": stats_result['PodcastsPlayed'], - "TimeListened": stats_result['TimeListened'], - "PodcastsAdded": stats_result['PodcastsAdded'], - "EpisodesSaved": stats_result['EpisodesSaved'], - "EpisodesDownloaded": stats_result['EpisodesDownloaded'] - } - else: # Assume it's a tuple - stats = { - "UserCreated": stats_result[0], - "PodcastsPlayed": stats_result[1], - "TimeListened": stats_result[2], - "PodcastsAdded": stats_result[3], - "EpisodesSaved": stats_result[4], - "EpisodesDownloaded": stats_result[5] - } - - # Add ONLY GpodderUrl and Pod_Sync_Type to the stats - if isinstance(gpodder_results, dict): - if database_type == 'postgresql': - stats.update({ - "GpodderUrl": gpodder_results['gpodderurl'], - "Pod_Sync_Type": gpodder_results['pod_sync_type'] - }) - else: - stats.update({ - "GpodderUrl": gpodder_results['GpodderUrl'], - "Pod_Sync_Type": gpodder_results['Pod_Sync_Type'] - }) - else: # Assume it's a tuple - stats.update({ - "GpodderUrl": gpodder_results[0], - "Pod_Sync_Type": gpodder_results[1] - }) - - logging.info(f"Fetched stats with GPodder info: {stats}") - return stats - - -def saved_episode_list(database_type, cnx, user_id): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - query = """ - SELECT * FROM ( - SELECT - "Podcasts".PodcastName as podcastname, - "Episodes".EpisodeTitle as episodetitle, - "Episodes".EpisodePubDate as episodepubdate, - "Episodes".EpisodeDescription as episodedescription, - 
"Episodes".EpisodeID as episodeid, - "Episodes".EpisodeArtwork as episodeartwork, - "Episodes".EpisodeURL as episodeurl, - "Episodes".EpisodeDuration as episodeduration, - "Podcasts".WebsiteURL as websiteurl, - "UserEpisodeHistory".ListenDuration as listenduration, - "Episodes".Completed as completed, - TRUE as saved, - CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - FALSE as is_youtube - FROM "SavedEpisodes" - INNER JOIN "Episodes" ON "SavedEpisodes".EpisodeID = "Episodes".EpisodeID - INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - LEFT JOIN "UserEpisodeHistory" ON - "SavedEpisodes".EpisodeID = "UserEpisodeHistory".EpisodeID - AND "UserEpisodeHistory".UserID = %s - LEFT JOIN "EpisodeQueue" ON - "SavedEpisodes".EpisodeID = "EpisodeQueue".EpisodeID - AND "EpisodeQueue".UserID = %s - AND "EpisodeQueue".is_youtube = FALSE - LEFT JOIN "DownloadedEpisodes" ON - "SavedEpisodes".EpisodeID = "DownloadedEpisodes".EpisodeID - AND "DownloadedEpisodes".UserID = %s - WHERE "SavedEpisodes".UserID = %s - - UNION ALL - - SELECT - "Podcasts".PodcastName as podcastname, - "YouTubeVideos".VideoTitle as episodetitle, - "YouTubeVideos".PublishedAt as episodepubdate, - "YouTubeVideos".VideoDescription as episodedescription, - "YouTubeVideos".VideoID as episodeid, - "YouTubeVideos".ThumbnailURL as episodeartwork, - "YouTubeVideos".VideoURL as episodeurl, - "YouTubeVideos".Duration as episodeduration, - "Podcasts".WebsiteURL as websiteurl, - "UserVideoHistory".ListenDuration as listenduration, - "YouTubeVideos".Completed as completed, - TRUE as saved, - CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued, - CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - TRUE as is_youtube - FROM "SavedVideos" - INNER JOIN "YouTubeVideos" ON 
"SavedVideos".VideoID = "YouTubeVideos".VideoID - INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - LEFT JOIN "UserVideoHistory" ON - "SavedVideos".VideoID = "UserVideoHistory".VideoID - AND "UserVideoHistory".UserID = %s - LEFT JOIN "EpisodeQueue" ON - "SavedVideos".VideoID = "EpisodeQueue".EpisodeID - AND "EpisodeQueue".UserID = %s - AND "EpisodeQueue".is_youtube = TRUE - LEFT JOIN "DownloadedVideos" ON - "SavedVideos".VideoID = "DownloadedVideos".VideoID - AND "DownloadedVideos".UserID = %s - WHERE "SavedVideos".UserID = %s - ) combined - ORDER BY episodepubdate DESC - """ - else: # MySQL or MariaDB - cursor = cnx.cursor(dictionary=True) - query = """ - SELECT * FROM ( - SELECT - Podcasts.PodcastName as podcastname, - Episodes.EpisodeTitle as episodetitle, - Episodes.EpisodePubDate as episodepubdate, - Episodes.EpisodeDescription as episodedescription, - Episodes.EpisodeID as episodeid, - Episodes.EpisodeArtwork as episodeartwork, - Episodes.EpisodeURL as episodeurl, - Episodes.EpisodeDuration as episodeduration, - Podcasts.WebsiteURL as websiteurl, - UserEpisodeHistory.ListenDuration as listenduration, - Episodes.Completed as completed, - 1 as saved, - CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued, - CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded, - 0 as is_youtube - FROM SavedEpisodes - INNER JOIN Episodes ON SavedEpisodes.EpisodeID = Episodes.EpisodeID - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - LEFT JOIN UserEpisodeHistory ON - SavedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID - AND UserEpisodeHistory.UserID = %s - LEFT JOIN EpisodeQueue ON - SavedEpisodes.EpisodeID = EpisodeQueue.EpisodeID - AND EpisodeQueue.UserID = %s - AND EpisodeQueue.is_youtube = 0 - LEFT JOIN DownloadedEpisodes ON - SavedEpisodes.EpisodeID = DownloadedEpisodes.EpisodeID - AND DownloadedEpisodes.UserID = %s - WHERE SavedEpisodes.UserID = %s - - UNION ALL - - SELECT - 
Podcasts.PodcastName as podcastname, - YouTubeVideos.VideoTitle as episodetitle, - YouTubeVideos.PublishedAt as episodepubdate, - YouTubeVideos.VideoDescription as episodedescription, - YouTubeVideos.VideoID as episodeid, - YouTubeVideos.ThumbnailURL as episodeartwork, - YouTubeVideos.VideoURL as episodeurl, - YouTubeVideos.Duration as episodeduration, - Podcasts.WebsiteURL as websiteurl, - UserVideoHistory.ListenDuration as listenduration, - YouTubeVideos.Completed as completed, - 1 as saved, - CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued, - CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded, - 1 as is_youtube - FROM SavedVideos - INNER JOIN YouTubeVideos ON SavedVideos.VideoID = YouTubeVideos.VideoID - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - LEFT JOIN UserVideoHistory ON - SavedVideos.VideoID = UserVideoHistory.VideoID - AND UserVideoHistory.UserID = %s - LEFT JOIN EpisodeQueue ON - SavedVideos.VideoID = EpisodeQueue.EpisodeID - AND EpisodeQueue.UserID = %s - AND EpisodeQueue.is_youtube = 1 - LEFT JOIN DownloadedVideos ON - SavedVideos.VideoID = DownloadedVideos.VideoID - AND DownloadedVideos.UserID = %s - WHERE SavedVideos.UserID = %s - ) combined - ORDER BY episodepubdate DESC - """ - - # Execute with all params for both unions - we now need 8 user_id parameters - cursor.execute(query, (user_id, user_id, user_id, user_id, user_id, user_id, user_id, user_id)) - rows = cursor.fetchall() - cursor.close() - - if not rows: - return None - - saved_episodes = lowercase_keys(rows) - - if database_type != "postgresql": - bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube'] - for episode in saved_episodes: - for field in bool_fields: - if field in episode: - episode[field] = bool(episode[field]) - - return saved_episodes - -def save_episode(cnx, database_type, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - 
if is_youtube: - if database_type == "postgresql": - query = 'INSERT INTO "SavedVideos" (UserID, VideoID) VALUES (%s, %s)' - else: - query = "INSERT INTO SavedVideos (UserID, VideoID) VALUES (%s, %s)" - else: - if database_type == "postgresql": - query = 'INSERT INTO "SavedEpisodes" (UserID, EpisodeID) VALUES (%s, %s)' - else: - query = "INSERT INTO SavedEpisodes (UserID, EpisodeID) VALUES (%s, %s)" - - cursor.execute(query, (user_id, episode_id)) - - # Update UserStats table - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET EpisodesSaved = EpisodesSaved + 1 WHERE UserID = %s' - else: - query = "UPDATE UserStats SET EpisodesSaved = EpisodesSaved + 1 WHERE UserID = %s" - cursor.execute(query, (user_id,)) - - cnx.commit() - return True - except Exception as e: - print(f"Error saving {'video' if is_youtube else 'episode'}: {e}") - return False - finally: - cursor.close() - -def check_saved(cnx, database_type, user_id, episode_id, is_youtube=False): - cursor = cnx.cursor() - try: - if is_youtube: - if database_type == "postgresql": - query = 'SELECT * FROM "SavedVideos" WHERE UserID = %s AND VideoID = %s' - else: - query = "SELECT * FROM SavedVideos WHERE UserID = %s AND VideoID = %s" - else: - if database_type == "postgresql": - query = 'SELECT * FROM "SavedEpisodes" WHERE UserID = %s AND EpisodeID = %s' - else: - query = "SELECT * FROM SavedEpisodes WHERE UserID = %s AND EpisodeID = %s" - - cursor.execute(query, (user_id, episode_id)) - result = cursor.fetchone() - return bool(result) - except Exception as err: - print(f"Error checking saved {'video' if is_youtube else 'episode'}: {err}") - return False - finally: - cursor.close() - -def remove_saved_episode(cnx, database_type, episode_id, user_id, is_youtube=False): - cursor = cnx.cursor() - try: - logging.info(f"Removing {'video' if is_youtube else 'episode'} {episode_id} for user {user_id}") - if is_youtube: - if database_type == "postgresql": - query = """ - SELECT SaveID FROM "SavedVideos" - 
WHERE VideoID = %s AND UserID = %s - """ - else: - query = """ - SELECT SaveID FROM SavedVideos - WHERE VideoID = %s AND UserID = %s - """ - else: - if database_type == "postgresql": - query = """ - SELECT SaveID FROM "SavedEpisodes" - WHERE EpisodeID = %s AND UserID = %s - """ - else: - query = """ - SELECT SaveID FROM SavedEpisodes - WHERE EpisodeID = %s AND UserID = %s - """ - cursor.execute(query, (episode_id, user_id)) - result = cursor.fetchone() - if not result: - logging.warning(f"No saved {'video' if is_youtube else 'episode'} found for ID {episode_id} and user {user_id}") - return - - # Handle both dictionary and tuple result types - save_id = result['saveid'] if isinstance(result, dict) else result[0] - logging.info(f"Found SaveID: {save_id}") - - # Remove the saved entry - if is_youtube: - if database_type == "postgresql": - query = 'DELETE FROM "SavedVideos" WHERE SaveID = %s' - else: - query = "DELETE FROM SavedVideos WHERE SaveID = %s" - else: - if database_type == "postgresql": - query = 'DELETE FROM "SavedEpisodes" WHERE SaveID = %s' - else: - query = "DELETE FROM SavedEpisodes WHERE SaveID = %s" - - cursor.execute(query, (save_id,)) - rows_affected = cursor.rowcount - logging.info(f"Deleted {rows_affected} rows") - - # Update UserStats - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET EpisodesSaved = EpisodesSaved - 1 WHERE UserID = %s' - else: - query = "UPDATE UserStats SET EpisodesSaved = EpisodesSaved - 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - stats_rows_affected = cursor.rowcount - logging.info(f"Updated {stats_rows_affected} user stats rows") - - cnx.commit() - except Exception as e: - logging.error(f"Error during {'video' if is_youtube else 'episode'} removal: {e}") - cnx.rollback() - finally: - cursor.close() - -def get_categories(cnx, database_type, podcast_id, user_id): - cursor = cnx.cursor() - - try: - if database_type == "postgresql": - query = ( - 'SELECT "categories" ' - 'FROM "Podcasts" ' - 
'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: # For MySQL or MariaDB - query = ( - "SELECT Categories " - "FROM Podcasts " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - - if not result: - logging.warning("No matching podcast found.") - cursor.close() - return [] - - # Check if the result is a dictionary or a tuple - if isinstance(result, dict): - # For dictionary, access the field by key - categories_field = result.get('categories') # Adjust key based on your schema - elif isinstance(result, tuple): - # For tuple, access the field by index - categories_field = result[0] - else: - logging.error(f"Unexpected result type: {type(result)}") - return [] - - # Split the categories if they exist - categories = categories_field.split(', ') if categories_field else [] - - return categories - - except Exception as e: - logging.error(f"Error retrieving categories: {e}") - raise - finally: - cursor.close() - - - -def add_category(cnx, database_type, podcast_id, user_id, category): - cursor = cnx.cursor() - - try: - if database_type == "postgresql": - query = ( - 'SELECT categories ' - 'FROM "Podcasts" ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: # For MySQL or MariaDB - query = ( - "SELECT Categories " - "FROM Podcasts " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - - if not result: - logging.warning("No matching podcast found.") - cursor.close() - return False - - # Extract the categories and split them into a list - # Check if the result is a dictionary or a tuple - if isinstance(result, dict): - # For dictionary, access the field by key - categories_field = result.get('categories') # Adjust key based on your schema - elif isinstance(result, tuple): - # For tuple, access the field by index - categories_field = result[0] - else: - logging.error(f"Unexpected result type: {type(result)}") - return [] - - 
# Split the categories if they exist - categories = categories_field.split(', ') if categories_field else [] - - - # Add the new category if it doesn't exist - if category not in categories: - categories.append(category) - - # Join the updated categories back into a comma-separated string - updated_categories = ', '.join(categories) - - # Update the database with the new categories list - if database_type == "postgresql": - update_query = ( - 'UPDATE "Podcasts" ' - 'SET "categories" = %s ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: - update_query = ( - "UPDATE Podcasts " - "SET Categories = %s " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(update_query, (updated_categories, podcast_id, user_id)) - cnx.commit() - - return True - - except Exception as e: - logging.error(f"Error adding category: {e}") - raise - finally: - cursor.close() - -def remove_category(cnx, database_type, podcast_id, user_id, category): - cursor = cnx.cursor() - - try: - if database_type == "postgresql": - query = ( - 'SELECT categories ' - 'FROM "Podcasts" ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: # For MySQL or MariaDB - query = ( - "SELECT Categories " - "FROM Podcasts " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - - print(f'heres cats: {result}') - - if not result: - logging.warning("No matching podcast found.") - cursor.close() - return - - # Extract the categories and split them into a list - # Check if the result is a dictionary or a tuple - if isinstance(result, dict): - # For dictionary, access the field by key - categories_field = result.get('categories') # Adjust key based on your schema - elif isinstance(result, tuple): - # For tuple, access the field by index - categories_field = result[0] - else: - logging.error(f"Unexpected result type: {type(result)}") - return [] - - # Split the categories if they exist - categories = categories_field.split(', ') if 
categories_field else [] - - # Remove the category if it exists - if category in categories: - categories.remove(category) - - # Join the updated categories back into a comma-separated string - updated_categories = ', '.join(categories) - - # Update the database with the new categories list - if database_type == "postgresql": - update_query = ( - 'UPDATE "Podcasts" ' - 'SET "categories" = %s ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: - update_query = ( - "UPDATE Podcasts " - "SET Categories = %s " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(update_query, (updated_categories, podcast_id, user_id)) - cnx.commit() - - except Exception as e: - logging.error(f"Error removing category: {e}") - raise - finally: - cursor.close() - -def update_feed_cutoff_days(cnx, database_type, podcast_id, user_id, feed_cutoff_days): - cursor = cnx.cursor() - - try: - # Validate that the podcast exists and belongs to the user - if database_type == "postgresql": - query = ( - 'SELECT "podcastid" ' - 'FROM "Podcasts" ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: # For MySQL or MariaDB - query = ( - "SELECT PodcastID " - "FROM Podcasts " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - - if not result: - logging.warning("No matching podcast found or podcast does not belong to the user.") - cursor.close() - return False - - # Update the feed cutoff days - if database_type == "postgresql": - update_query = ( - 'UPDATE "Podcasts" ' - 'SET "feedcutoffdays" = %s ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: - update_query = ( - "UPDATE Podcasts " - "SET FeedCutoffDays = %s " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(update_query, (feed_cutoff_days, podcast_id, user_id)) - cnx.commit() - - return True - - except Exception as e: - logging.error(f"Error updating feed cutoff days: {e}") - raise - finally: - cursor.close() - -def 
get_feed_cutoff_days(cnx, database_type, podcast_id, user_id): - cursor = cnx.cursor() - - try: - if database_type == "postgresql": - query = ( - 'SELECT "feedcutoffdays" ' - 'FROM "Podcasts" ' - 'WHERE "podcastid" = %s AND "userid" = %s' - ) - else: # For MySQL or MariaDB - query = ( - "SELECT FeedCutoffDays " - "FROM Podcasts " - "WHERE PodcastID = %s AND UserID = %s" - ) - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - - if not result: - logging.warning("No matching podcast found.") - cursor.close() - return None - - # Check if the result is a dictionary or a tuple - if isinstance(result, dict): - # For dictionary, access the field by key - feed_cutoff_days = result.get('feedcutoffdays') # PostgreSQL key - elif isinstance(result, tuple): - # For tuple, access the field by index - feed_cutoff_days = result[0] - else: - logging.error(f"Unexpected result type: {type(result)}") - return None - - return feed_cutoff_days - - except Exception as e: - logging.error(f"Error getting feed cutoff days: {e}") - raise - finally: - cursor.close() - - -# In database_functions/functions.py -# -def send_ntfy_notification(topic: str, server_url: str, title: str, message: str): - try: - import requests - - # Default to ntfy.sh if no server URL provided - base_url = server_url.rstrip('/') if server_url else "https://ntfy.sh" - url = f"{base_url}/{topic}" - - headers = { - "Title": title, - "Content-Type": "text/plain" - } - - response = requests.post(url, headers=headers, data=message) - response.raise_for_status() - return True - except Exception as e: - logging.error(f"Error sending NTFY notification: {e}") - return False - -def send_gotify_notification(server_url: str, token: str, title: str, message: str): - try: - import requests - - url = f"{server_url.rstrip('/')}/message" - - headers = { - "X-Gotify-Key": token - } - - data = { - "title": title, - "message": message, - "priority": 5 - } - - response = requests.post(url, headers=headers, 
json=data) - response.raise_for_status() - return True - except Exception as e: - logging.error(f"Error sending Gotify notification: {e}") - return False - -# Base notification functions for actual episode notifications -def send_ntfy_notification(topic: str, server_url: str, title: str, message: str): - try: - base_url = server_url.rstrip('/') if server_url else "https://ntfy.sh" - url = f"{base_url}/{topic}" - headers = { - "Title": title, - "Content-Type": "text/plain" - } - # Add short timeout - if it takes more than 2 seconds, abort - response = requests.post(url, headers=headers, data=message, timeout=2) - response.raise_for_status() - return True - except requests.Timeout: - logging.error(f"Timeout sending notification to {url}") - return False - except Exception as e: - logging.error(f"Error sending NTFY notification: {e}") - return False - -def send_gotify_notification(server_url: str, token: str, title: str, message: str): - try: - url = f"{server_url.rstrip('/')}/message" - data = { - "title": title, - "message": message, - "priority": 5 - } - headers = { - "X-Gotify-Key": token - } - response = requests.post(url, headers=headers, json=data, timeout=2) - response.raise_for_status() - return True - except requests.Timeout: - logging.error(f"Timeout sending notification to {url}") - return False - except Exception as e: - logging.error(f"Error sending Gotify notification: {e}") - return False - -# Test versions that specifically mention they're test notifications -def send_test_ntfy_notification(topic: str, server_url: str): - return send_ntfy_notification( - topic=topic, - server_url=server_url, - title="Pinepods Test Notification", - message="This is a test notification from your Pinepods server!" - ) - -def send_test_gotify_notification(server_url: str, token: str): - return send_gotify_notification( - server_url=server_url, - token=token, - title="Pinepods Test Notification", - message="This is a test notification from your Pinepods server!" 
- ) - -def send_test_notification(cnx, database_type, user_id, platform): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken - FROM "UserNotificationSettings" - WHERE UserID = %s AND Platform = %s AND Enabled = TRUE - """ - else: - query = """ - SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken - FROM UserNotificationSettings - WHERE UserID = %s AND Platform = %s AND Enabled = TRUE - """ - cursor.execute(query, (user_id, platform)) - settings = cursor.fetchone() - if not settings: - logging.error("No notification settings found") - return False - - if isinstance(settings, dict): # PostgreSQL dict case - if platform == 'ntfy': - return send_test_ntfy_notification( - topic=settings['ntfytopic'], # Note: lowercase from your logs - server_url=settings['ntfyserverurl'] # Note: lowercase from your logs - ) - else: # gotify - return send_test_gotify_notification( - server_url=settings['gotifyurl'], # Note: lowercase from your logs - token=settings['gotifytoken'] # Note: lowercase from your logs - ) - else: # MySQL or PostgreSQL tuple case - if platform == 'ntfy': - return send_test_ntfy_notification( - settings[2], # NtfyTopic - settings[3] # NtfyServerUrl - ) - else: # gotify - return send_test_gotify_notification( - settings[4], # GotifyUrl - settings[5] # GotifyToken - ) - except Exception as e: - logging.error(f"Error sending test notification: {e}") - logging.error(f"Settings object type: {type(settings)}") - logging.error(f"Settings content: {settings}") - return False - finally: - cursor.close() - -def get_notification_settings(cnx, database_type, user_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken - FROM "UserNotificationSettings" - WHERE UserID = %s - """ - else: # MySQL - query = """ - SELECT Platform, Enabled, 
NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken - FROM UserNotificationSettings - WHERE UserID = %s - """ - - cursor.execute(query, (user_id,)) - result = cursor.fetchall() - - settings = [] - for row in result: - if isinstance(row, dict): # PostgreSQL with RealDictCursor - setting = { - "platform": row["platform"], - "enabled": bool(row["enabled"]), - "ntfy_topic": row["ntfytopic"], - "ntfy_server_url": row["ntfyserverurl"], - "gotify_url": row["gotifyurl"], - "gotify_token": row["gotifytoken"] - } - else: # MySQL or PostgreSQL with regular cursor - setting = { - "platform": row[0], - "enabled": bool(row[1]), - "ntfy_topic": row[2], - "ntfy_server_url": row[3], - "gotify_url": row[4], - "gotify_token": row[5] - } - settings.append(setting) - - return settings - - except Exception as e: - logging.error(f"Error fetching notification settings: {e}") - raise - finally: - cursor.close() - -def update_notification_settings(cnx, database_type, user_id, platform, enabled, ntfy_topic=None, - ntfy_server_url=None, gotify_url=None, gotify_token=None): - cursor = cnx.cursor() - try: - # First check if settings exist for this user and platform - if database_type == "postgresql": - check_query = """ - SELECT 1 FROM "UserNotificationSettings" - WHERE UserID = %s AND Platform = %s - """ - else: - check_query = """ - SELECT 1 FROM UserNotificationSettings - WHERE UserID = %s AND Platform = %s - """ - - cursor.execute(check_query, (user_id, platform)) - exists = cursor.fetchone() is not None - - if exists: - if database_type == "postgresql": - query = """ - UPDATE "UserNotificationSettings" - SET Enabled = %s, - NtfyTopic = %s, - NtfyServerUrl = %s, - GotifyUrl = %s, - GotifyToken = %s - WHERE UserID = %s AND Platform = %s - """ - else: - query = """ - UPDATE UserNotificationSettings - SET Enabled = %s, - NtfyTopic = %s, - NtfyServerUrl = %s, - GotifyUrl = %s, - GotifyToken = %s - WHERE UserID = %s AND Platform = %s - """ - else: - if database_type == "postgresql": - query = """ - 
INSERT INTO "UserNotificationSettings" - (UserID, Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken) - VALUES (%s, %s, %s, %s, %s, %s, %s) - """ - else: - query = """ - INSERT INTO UserNotificationSettings - (UserID, Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken) - VALUES (%s, %s, %s, %s, %s, %s, %s) - """ - - params = ( - enabled if exists else user_id, - ntfy_topic if exists else platform, - ntfy_server_url if exists else enabled, - gotify_url if exists else ntfy_topic, - gotify_token if exists else ntfy_server_url, - user_id if exists else gotify_url, - platform if exists else gotify_token - ) - - cursor.execute(query, params) - cnx.commit() - return True - - except Exception as e: - logging.error(f"Error updating notification settings: {e}") - cnx.rollback() - raise - finally: - cursor.close() - - - - -def increment_played(cnx, database_type, user_id): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET PodcastsPlayed = PodcastsPlayed + 1 WHERE UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE UserStats SET PodcastsPlayed = PodcastsPlayed + 1 WHERE UserID = %s" - cursor.execute(query, (user_id,)) - cnx.commit() - cursor.close() - -def increment_listen_time(cnx, database_type, user_id): - cursor = cnx.cursor() - - # Update UserStats table to increment PodcastsPlayed count - if database_type == "postgresql": - query = ('UPDATE "UserStats" SET TimeListened = TimeListened + 1 ' - "WHERE UserID = %s") - else: - query = ("UPDATE UserStats SET TimeListened = TimeListened + 1 " - "WHERE UserID = %s") - cursor.execute(query, (user_id,)) - cnx.commit() - - cursor.close() - # cnx.close() - - - -def get_user_episode_count(cnx, database_type, user_id): - cursor = cnx.cursor() - if database_type == "postgresql": - query = ( - 'SELECT COUNT(*) ' - 'FROM "Episodes" ' - 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID ' - 'WHERE "Podcasts".UserID = %s' - ) - else: # 
MySQL or MariaDB - query = ( - "SELECT COUNT(*) " - "FROM Episodes " - "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID " - "WHERE Podcasts.UserID = %s" - ) - - cursor.execute(query, (user_id,)) - episode_count = cursor.fetchone()[0] - cursor.close() - - return episode_count - - - -def get_user_episode_count(cnx, database_type, user_id): - cursor = cnx.cursor() - if database_type == "postgresql": - query = ( - 'SELECT COUNT(*) ' - 'FROM "Episodes" ' - 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID ' - 'WHERE "Podcasts".UserID = %s' - ) - else: # MySQL or MariaDB - query = ( - "SELECT COUNT(*) " - "FROM Episodes " - "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID " - "WHERE Podcasts.UserID = %s" - ) - - cursor.execute(query, (user_id,)) - episode_count = cursor.fetchone()[0] - cursor.close() - - return episode_count - - -def check_podcast(cnx, database_type, user_id, podcast_name, podcast_url): - cursor = None - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT PodcastID FROM "Podcasts" WHERE UserID = %s AND PodcastName = %s AND FeedURL = %s' - else: # MySQL or MariaDB - query = "SELECT PodcastID FROM Podcasts WHERE UserID = %s AND PodcastName = %s AND FeedURL = %s" - - cursor.execute(query, (user_id, podcast_name, podcast_url)) - - return cursor.fetchone() is not None - except Exception: - return False - finally: - if cursor: - cursor.close() - -def check_youtube_channel(cnx, database_type, user_id, channel_name, channel_url): - cursor = None - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = ''' - SELECT PodcastID - FROM "Podcasts" - WHERE UserID = %s - AND PodcastName = %s - AND FeedURL = %s - AND IsYouTubeChannel = TRUE - ''' - else: # MySQL or MariaDB - query = ''' - SELECT PodcastID - FROM Podcasts - WHERE UserID = %s - AND PodcastName = %s - AND FeedURL = %s - AND IsYouTubeChannel = TRUE - ''' - cursor.execute(query, (user_id, channel_name, 
channel_url)) - return cursor.fetchone() is not None - except Exception: - return False - finally: - if cursor: - cursor.close() - -def check_youtube_channel_id(cnx, database_type, podcast_id): - cursor = None - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = ''' - SELECT IsYouTubeChannel - FROM "Podcasts" - WHERE PodcastID = %s - AND IsYouTubeChannel = TRUE - ''' - else: # MySQL or MariaDB - query = ''' - SELECT IsYouTubeChannel - FROM Podcasts - WHERE PodcastID = %s - AND IsYouTubeChannel = TRUE - ''' - cursor.execute(query, (podcast_id,)) - result = cursor.fetchone() - - # Handle different return types from different database adapters - if result is not None: - # If result is a dict (psycopg2 with dict cursor) - if isinstance(result, dict): - return True - # If result is a tuple/list (standard cursor) - elif isinstance(result, (tuple, list)): - return True - # Any other non-None result means we found a match - else: - return True - return False - except Exception as e: - print(f"Error checking if YouTube channel: {e}") - return False - finally: - if cursor: - cursor.close() - -def reset_password_create_code(cnx, database_type, user_email): - reset_code = ''.join(random.choices(string.ascii_uppercase + string.digits, k=6)) - cursor = cnx.cursor() - - # Check if a user with this email exists - if database_type == "postgresql": - check_query = """ - SELECT UserID - FROM "Users" - WHERE Email = %s - """ - else: - check_query = """ - SELECT UserID - FROM Users - WHERE Email = %s - """ - cursor.execute(check_query, (user_email,)) - result = cursor.fetchone() - if result is None: - cursor.close() - # cnx.close() - return False - - # If the user exists, update the reset code and expiry - reset_expiry = datetime.datetime.now() + datetime.timedelta(hours=1) - - if database_type == "postgresql": - update_query = """ - UPDATE "Users" - SET Reset_Code = %s, - Reset_Expiry = %s - WHERE Email = %s - """ - else: - update_query = """ - UPDATE Users - 
SET Reset_Code = %s, - Reset_Expiry = %s - WHERE Email = %s - """ - params = (reset_code, reset_expiry.strftime('%Y-%m-%d %H:%M:%S'), user_email) - try: - cursor.execute(update_query, params) - cnx.commit() - except Exception as e: - print(f"Error when trying to update reset code: {e}") - cursor.close() - # cnx.close() - return False - - cursor.close() - # cnx.close() - - return reset_code - -def reset_password_remove_code(cnx, database_type, email): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'UPDATE "Users" SET Reset_Code = NULL, Reset_Expiry = NULL WHERE Email = %s' - else: - query = "UPDATE Users SET Reset_Code = NULL, Reset_Expiry = NULL WHERE Email = %s" - cursor.execute(query, (email,)) - cnx.commit() - return cursor.rowcount > 0 - - -def verify_password(cnx, database_type, username: str, password: str) -> bool: - cursor = cnx.cursor() - if database_type == "postgresql": - cursor.execute('SELECT Hashed_PW FROM "Users" WHERE Username = %s', (username,)) - else: - cursor.execute("SELECT Hashed_PW FROM Users WHERE Username = %s", (username,)) - result = cursor.fetchone() - cursor.close() - - if not result: - return False # User not found - - hashed_password = result[0] - - ph = PasswordHasher() - try: - # Attempt to verify the password - ph.verify(hashed_password, password) - # If verification does not raise an exception, password is correct - # Optionally rehash the password if needed (argon2 can detect this) - if ph.check_needs_rehash(hashed_password): - new_hash = ph.hash(password) - # Update database with new hash if necessary - # You'll need to implement this part - # update_hashed_password(cnx, username, new_hash) - return True - except VerifyMismatchError: - # If verification fails, password is incorrect - return False - - -def verify_reset_code(cnx, database_type, user_email, reset_code): - cursor = cnx.cursor() - - if database_type == "postgresql": - select_query = """ - SELECT Reset_Code, Reset_Expiry - FROM "Users" - WHERE 
Email = %s - """ - else: - select_query = """ - SELECT Reset_Code, Reset_Expiry - FROM Users - WHERE Email = %s - """ - cursor.execute(select_query, (user_email,)) - result = cursor.fetchone() - - cursor.close() - # cnx.close() - - # Check if a user with this email exists - if result is None: - return None - - # Check if the reset code is valid and not expired - stored_code, expiry = result - if stored_code == reset_code and datetime.datetime.now() < expiry: - return True - - return False - -def check_reset_user(cnx, database_type, username, email): - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT * FROM "Users" WHERE Username = %s AND Email = %s' - else: - query = "SELECT * FROM Users WHERE Username = %s AND Email = %s" - cursor.execute(query, (username, email)) - result = cursor.fetchone() - return result is not None - - -def reset_password_prompt(cnx, database_type, user_email, hashed_pw): - cursor = cnx.cursor() - if database_type == "postgresql": - update_query = """ - UPDATE "Users" - SET Hashed_PW = %s, - Reset_Code = NULL, - Reset_Expiry = NULL - WHERE Email = %s - """ - else: - update_query = """ - UPDATE Users - SET Hashed_PW = %s, - Reset_Code = NULL, - Reset_Expiry = NULL - WHERE Email = %s - """ - params = (hashed_pw, user_email) - cursor.execute(update_query, params) - - if cursor.rowcount == 0: - return None - - cnx.commit() - cursor.close() - # cnx.close() - - return "Password Reset Successfully" - -def get_episode_metadata(database_type, cnx, episode_id, user_id, person_episode=False, is_youtube=False): - if database_type == "postgresql": - from psycopg.rows import dict_row - cnx.row_factory = dict_row - cursor = cnx.cursor() - - if is_youtube: - # Query for YouTube videos - query_youtube = """ - SELECT "Podcasts".PodcastID, "Podcasts".PodcastIndexID, "Podcasts".FeedURL, - "Podcasts".PodcastName, "Podcasts".ArtworkURL, - "YouTubeVideos".VideoTitle as EpisodeTitle, - "YouTubeVideos".PublishedAt as EpisodePubDate, - 
"YouTubeVideos".VideoDescription as EpisodeDescription, - "YouTubeVideos".ThumbnailURL as EpisodeArtwork, - "YouTubeVideos".VideoURL as EpisodeURL, - "YouTubeVideos".Duration as EpisodeDuration, - "YouTubeVideos".VideoID as EpisodeID, - "YouTubeVideos".ListenPosition as ListenDuration, - "YouTubeVideos".Completed, - CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded, - TRUE::boolean as is_youtube - FROM "YouTubeVideos" - INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - LEFT JOIN "EpisodeQueue" q ON "YouTubeVideos".VideoID = q.EpisodeID AND q.UserID = %s - LEFT JOIN "SavedEpisodes" s ON "YouTubeVideos".VideoID = s.EpisodeID AND s.UserID = %s - LEFT JOIN "DownloadedEpisodes" d ON "YouTubeVideos".VideoID = d.EpisodeID AND d.UserID = %s - WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s - """ - cursor.execute(query_youtube, (user_id, user_id, user_id, episode_id, user_id)) - result = cursor.fetchone() - - # If not found, try with system user (1) - if not result: - cursor.execute(query_youtube, (user_id, user_id, user_id, episode_id, 1)) - result = cursor.fetchone() - - elif person_episode: - # First get the episode from PeopleEpisodes and match with Episodes using title and URL - query_people = """ - SELECT pe.*, - p.PodcastID, p.PodcastName, p.ArtworkURL as podcast_artwork, - p.FeedURL, p.WebsiteURL, p.PodcastIndexID, - e.EpisodeID as real_episode_id, - COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as final_artwork, - CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded, - FALSE::boolean as is_youtube - FROM "PeopleEpisodes" pe - JOIN "Podcasts" p ON pe.PodcastID = p.PodcastID - JOIN 
"Episodes" e ON ( - e.EpisodeTitle = pe.EpisodeTitle - AND e.EpisodeURL = pe.EpisodeURL - ) - LEFT JOIN "EpisodeQueue" q ON e.EpisodeID = q.EpisodeID AND q.UserID = %s - LEFT JOIN "SavedEpisodes" s ON e.EpisodeID = s.EpisodeID AND s.UserID = %s - LEFT JOIN "DownloadedEpisodes" d ON e.EpisodeID = d.EpisodeID AND d.UserID = %s - WHERE pe.EpisodeID = %s - """ - cursor.execute(query_people, (user_id, user_id, user_id, episode_id)) - people_episode = cursor.fetchone() - - if not people_episode: - raise ValueError(f"No people episode found with ID {episode_id}") - - # Now get additional data using the real episode ID - query_history = """ - SELECT "UserEpisodeHistory".ListenDuration, "Episodes".Completed - FROM "Episodes" - LEFT JOIN "UserEpisodeHistory" ON - "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID - AND "UserEpisodeHistory".UserID = %s - WHERE "Episodes".EpisodeID = %s - """ - cursor.execute(query_history, (user_id, people_episode['real_episode_id'])) - history_data = cursor.fetchone() or {} - - # Combine the data - result = { - 'episodetitle': people_episode['episodetitle'], - 'podcastname': people_episode['podcastname'], - 'podcastid': people_episode['podcastid'], - 'podcastindexid': people_episode['podcastindexid'], - 'feedurl': people_episode['feedurl'], - 'episodepubdate': people_episode['episodepubdate'].isoformat() if people_episode['episodepubdate'] else None, - 'episodedescription': people_episode['episodedescription'], - 'episodeartwork': people_episode['final_artwork'], - 'episodeurl': people_episode['episodeurl'], - 'episodeduration': people_episode['episodeduration'], - 'listenduration': history_data.get('listenduration'), - 'episodeid': people_episode['real_episode_id'], - 'completed': history_data.get('completed', False), - 'is_queued': people_episode['is_queued'], - 'is_saved': people_episode['is_saved'], - 'is_downloaded': people_episode['is_downloaded'] - } - else: - # Original query for regular episodes - query = """ - SELECT 
"Podcasts".PodcastID, "Podcasts".PodcastIndexID, "Podcasts".FeedURL, - "Podcasts".PodcastName, "Podcasts".ArtworkURL, "Episodes".EpisodeTitle, - "Episodes".EpisodePubDate, "Episodes".EpisodeDescription, - "Episodes".EpisodeArtwork, "Episodes".EpisodeURL, "Episodes".EpisodeDuration, - "Episodes".EpisodeID, "Podcasts".WebsiteURL, - "UserEpisodeHistory".ListenDuration, "Episodes".Completed, - CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded, - FALSE::boolean as is_youtube - FROM "Episodes" - INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - LEFT JOIN "UserEpisodeHistory" ON - "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID - AND "Podcasts".UserID = "UserEpisodeHistory".UserID - LEFT JOIN "EpisodeQueue" q ON "Episodes".EpisodeID = q.EpisodeID AND q.UserID = %s - LEFT JOIN "SavedEpisodes" s ON "Episodes".EpisodeID = s.EpisodeID AND s.UserID = %s - LEFT JOIN "DownloadedEpisodes" d ON "Episodes".EpisodeID = d.EpisodeID AND d.UserID = %s - WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s - """ - cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id)) - result = cursor.fetchone() - - # If not found, try with system user (1) - if not result: - cursor.execute(query, (user_id, user_id, user_id, episode_id, 1)) - result = cursor.fetchone() - - cursor.close() - - if not result: - raise ValueError(f"No episode found with ID {episode_id}" + - (" for person episode" if person_episode else f" for user {user_id}")) - - lower_row = lowercase_keys(result) - bool_fix = convert_bools(lower_row, database_type) - return bool_fix - - - else: - cursor = cnx.cursor(dictionary=True) - if is_youtube: - # MariaDB version of YouTube videos query - query = """ - SELECT Podcasts.PodcastID, Podcasts.PodcastIndexID, Podcasts.FeedURL, - Podcasts.PodcastName, 
Podcasts.ArtworkURL, - YouTubeVideos.VideoTitle as EpisodeTitle, - YouTubeVideos.PublishedAt as EpisodePubDate, - YouTubeVideos.VideoDescription as EpisodeDescription, - YouTubeVideos.ThumbnailURL as EpisodeArtwork, - YouTubeVideos.VideoURL as EpisodeURL, - YouTubeVideos.Duration as EpisodeDuration, - YouTubeVideos.VideoID as EpisodeID, - YouTubeVideos.ListenPosition as ListenDuration, - YouTubeVideos.Completed, - CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded, - 1 as is_youtube - FROM YouTubeVideos - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - LEFT JOIN EpisodeQueue q ON YouTubeVideos.VideoID = q.EpisodeID AND q.UserID = %s - LEFT JOIN SavedEpisodes s ON YouTubeVideos.VideoID = s.EpisodeID AND s.UserID = %s - LEFT JOIN DownloadedEpisodes d ON YouTubeVideos.VideoID = d.EpisodeID AND d.UserID = %s - WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s - """ - cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id)) - result = cursor.fetchone() - elif person_episode: - # MariaDB version of people episodes query - query_people = """ - SELECT pe.*, - p.PodcastID, p.PodcastName, p.ArtworkURL as podcast_artwork, - p.FeedURL, p.WebsiteURL, p.PodcastIndexID, - e.EpisodeID as real_episode_id, - COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as final_artwork, - CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded, - FALSE as is_youtube - FROM PeopleEpisodes pe - JOIN Podcasts p ON pe.PodcastID = p.PodcastID - JOIN Episodes e ON ( - e.EpisodeTitle = pe.EpisodeTitle - AND e.EpisodeURL = pe.EpisodeURL - ) - LEFT JOIN EpisodeQueue q ON e.EpisodeID = q.EpisodeID AND q.UserID = %s - LEFT JOIN SavedEpisodes s ON e.EpisodeID = s.EpisodeID 
AND s.UserID = %s - LEFT JOIN DownloadedEpisodes d ON e.EpisodeID = d.EpisodeID AND d.UserID = %s - WHERE pe.EpisodeID = %s - """ - cursor.execute(query_people, (user_id, user_id, user_id, episode_id)) - people_episode = cursor.fetchone() - - if not people_episode: - raise ValueError(f"No people episode found with ID {episode_id}") - - # Get additional data using the real episode ID - query_history = """ - SELECT UserEpisodeHistory.ListenDuration, Episodes.Completed - FROM Episodes - LEFT JOIN UserEpisodeHistory ON - Episodes.EpisodeID = UserEpisodeHistory.EpisodeID - AND UserEpisodeHistory.UserID = %s - WHERE Episodes.EpisodeID = %s - """ - cursor.execute(query_history, (user_id, people_episode['real_episode_id'])) - history_data = cursor.fetchone() or {} - - # Combine the data - result = { - 'episodetitle': people_episode['episodetitle'], - 'podcastname': people_episode['podcastname'], - 'podcastid': people_episode['podcastid'], - 'podcastindexid': people_episode['podcastindexid'], - 'feedurl': people_episode['feedurl'], - 'episodepubdate': people_episode['episodepubdate'].isoformat() if people_episode['episodepubdate'] else None, - 'episodedescription': people_episode['episodedescription'], - 'episodeartwork': people_episode['final_artwork'], - 'episodeurl': people_episode['episodeurl'], - 'episodeduration': people_episode['episodeduration'], - 'listenduration': history_data.get('listenduration'), - 'episodeid': people_episode['real_episode_id'], - 'completed': bool(history_data.get('completed', 0)), - 'is_queued': bool(people_episode['is_queued']), - 'is_saved': bool(people_episode['is_saved']), - 'is_downloaded': bool(people_episode['is_downloaded']) - } - else: - # MariaDB version of regular episodes query - query = """ - SELECT Podcasts.PodcastID, Podcasts.PodcastIndexID, Podcasts.FeedURL, - Podcasts.PodcastName, Podcasts.ArtworkURL, Episodes.EpisodeTitle, - Episodes.EpisodePubDate, Episodes.EpisodeDescription, - Episodes.EpisodeArtwork, Episodes.EpisodeURL, 
Episodes.EpisodeDuration, - Episodes.EpisodeID, Podcasts.WebsiteURL, - UserEpisodeHistory.ListenDuration, Episodes.Completed, - CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued, - CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved, - CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded, - FALSE as is_youtube - FROM Episodes - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - LEFT JOIN UserEpisodeHistory ON - Episodes.EpisodeID = UserEpisodeHistory.EpisodeID - AND Podcasts.UserID = UserEpisodeHistory.UserID - LEFT JOIN EpisodeQueue q ON Episodes.EpisodeID = q.EpisodeID AND q.UserID = %s - LEFT JOIN SavedEpisodes s ON Episodes.EpisodeID = s.EpisodeID AND s.UserID = %s - LEFT JOIN DownloadedEpisodes d ON Episodes.EpisodeID = d.EpisodeID AND d.UserID = %s - WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s - """ - cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id)) - result = cursor.fetchone() - - # If not found, try with system user (1) - if not result: - cursor.execute(query, (user_id, user_id, user_id, episode_id, 1)) - result = cursor.fetchone() - - cursor.close() - - if not result: - raise ValueError(f"No episode found with ID {episode_id}" + - (" for person episode" if person_episode else f" for user {user_id}")) - - # Convert boolean fields for MariaDB - if result: - result['completed'] = bool(result.get('completed', 0)) - result['is_queued'] = bool(result.get('is_queued', 0)) - result['is_saved'] = bool(result.get('is_saved', 0)) - result['is_downloaded'] = bool(result.get('is_downloaded', 0)) - result['is_youtube'] = bool(result.get('is_youtube', 0)) - - # Format date if present - if result.get('episodepubdate'): - result['episodepubdate'] = result['episodepubdate'].isoformat() - - lower_row = lowercase_keys(result) - bool_fix = convert_bools(lower_row, database_type) - return bool_fix - - -def get_episode_metadata_id(database_type, cnx, episode_id): - if database_type == 
def get_episode_metadata_id(database_type, cnx, episode_id):
    """Fetch display metadata for a single episode or YouTube video by ID.

    The ID is looked up in both "Episodes" and "YouTubeVideos" via a
    UNION ALL (LIMIT 1), so one lookup serves either content type; the
    is_youtube column in the result distinguishes which arm matched.

    Returns:
        dict with lowercase keys (via lowercase_keys/convert_bools).
    Raises:
        ValueError: if no episode or video matches episode_id.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = """
            SELECT * FROM (
                SELECT
                    "Podcasts".PodcastID,
                    "Podcasts".FeedURL,
                    "Podcasts".PodcastName,
                    "Podcasts".ArtworkURL,
                    "Episodes".EpisodeTitle,
                    "Episodes".EpisodePubDate,
                    "Episodes".EpisodeDescription,
                    "Episodes".EpisodeArtwork,
                    "Episodes".EpisodeURL,
                    "Episodes".EpisodeDuration,
                    "Episodes".EpisodeID,
                    "Podcasts".WebsiteURL,
                    "UserEpisodeHistory".ListenDuration,
                    "Episodes".Completed,
                    FALSE::boolean as is_youtube
                FROM "Episodes"
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "Podcasts".UserID = "UserEpisodeHistory".UserID
                WHERE "Episodes".EpisodeID = %s

                UNION ALL

                SELECT
                    "Podcasts".PodcastID,
                    "Podcasts".FeedURL,
                    "Podcasts".PodcastName,
                    "Podcasts".ArtworkURL,
                    "YouTubeVideos".VideoTitle as EpisodeTitle,
                    "YouTubeVideos".PublishedAt as EpisodePubDate,
                    "YouTubeVideos".VideoDescription as EpisodeDescription,
                    "YouTubeVideos".ThumbnailURL as EpisodeArtwork,
                    "YouTubeVideos".VideoURL as EpisodeURL,
                    "YouTubeVideos".Duration as EpisodeDuration,
                    "YouTubeVideos".VideoID as EpisodeID,
                    "Podcasts".WebsiteURL,
                    "YouTubeVideos".ListenPosition as ListenDuration,
                    "YouTubeVideos".Completed,
                    TRUE::boolean as is_youtube
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                WHERE "YouTubeVideos".VideoID = %s
            ) combined
            LIMIT 1
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = """
            SELECT * FROM (
                SELECT
                    Podcasts.PodcastID,
                    Podcasts.FeedURL,
                    Podcasts.PodcastName,
                    Podcasts.ArtworkURL,
                    Episodes.EpisodeTitle,
                    Episodes.EpisodePubDate,
                    Episodes.EpisodeDescription,
                    Episodes.EpisodeArtwork,
                    Episodes.EpisodeURL,
                    Episodes.EpisodeDuration,
                    Episodes.EpisodeID,
                    Podcasts.WebsiteURL,
                    UserEpisodeHistory.ListenDuration,
                    Episodes.Completed,
                    FALSE as is_youtube
                FROM Episodes
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND Podcasts.UserID = UserEpisodeHistory.UserID
                WHERE Episodes.EpisodeID = %s

                UNION ALL

                SELECT
                    Podcasts.PodcastID,
                    Podcasts.FeedURL,
                    Podcasts.PodcastName,
                    Podcasts.ArtworkURL,
                    YouTubeVideos.VideoTitle as EpisodeTitle,
                    YouTubeVideos.PublishedAt as EpisodePubDate,
                    YouTubeVideos.VideoDescription as EpisodeDescription,
                    YouTubeVideos.ThumbnailURL as EpisodeArtwork,
                    YouTubeVideos.VideoURL as EpisodeURL,
                    YouTubeVideos.Duration as EpisodeDuration,
                    YouTubeVideos.VideoID as EpisodeID,
                    Podcasts.WebsiteURL,
                    YouTubeVideos.ListenPosition as ListenDuration,
                    YouTubeVideos.Completed,
                    TRUE as is_youtube
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                WHERE YouTubeVideos.VideoID = %s
            ) combined
            LIMIT 1
        """

    # episode_id is bound twice: once for the Episodes arm, once for YouTubeVideos.
    cursor.execute(query, (episode_id, episode_id))
    row = cursor.fetchone()
    cursor.close()

    if not row:
        raise ValueError(f"No episode found with ID {episode_id}")

    lower_row = lowercase_keys(row)
    bool_fix = convert_bools(lower_row, database_type)
    return bool_fix



import logging

def save_mfa_secret(database_type, cnx, user_id, mfa_secret):
    """Store (or overwrite) the MFA/TOTP secret for a user.

    Returns True on success, False on any failure (the error is logged;
    the secret itself is never logged).
    """
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'UPDATE "Users" SET MFA_Secret = %s WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "UPDATE Users SET MFA_Secret = %s WHERE UserID = %s"

    try:
        cursor.execute(query, (mfa_secret, user_id))
        cnx.commit()
        logging.info("Successfully saved MFA secret for user")
        return True
    except Exception as e:
        # Include the exception detail so failures are diagnosable
        # (the original message gave no indication of the cause).
        logging.error(f"Error saving MFA secret for user: {e}")
        return False
    finally:
        # Close the cursor on both success and failure paths.
        cursor.close()


def check_mfa_enabled(database_type, cnx, user_id):
    """Return True if the user has an MFA secret stored, else False."""
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'SELECT MFA_Secret FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT MFA_Secret FROM Users WHERE UserID = %s"

    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
        cursor.close()

        if result is None:
            # Unknown user: treat as MFA disabled.
            return False

        # PostgreSQL returns 'mfa_secret' lowercase; MySQL may return
        # 'MFA_Secret', so normalize dict keys for the non-postgres path.
        if database_type != "postgresql":
            result = {k.lower(): v for k, v in result.items()}

        mfa_secret = result[0] if isinstance(result, tuple) else result.get('mfa_secret')
        return bool(mfa_secret)
    except Exception as e:
        print("Error checking MFA status:", e)
        return False


def get_mfa_secret(database_type, cnx, user_id):
    """Return the stored MFA secret for user_id, or None if absent or on error."""
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'SELECT MFA_Secret FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT MFA_Secret FROM Users WHERE UserID = %s"

    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()

        if isinstance(result, tuple):
            # Convert tuple rows to dict BEFORE closing the cursor:
            # cursor.description is not reliably available after close()
            # (the original read it post-close, which breaks on some drivers).
            result = dict(zip([desc[0] for desc in cursor.description], result))

        cursor.close()

        if isinstance(result, dict):
            if database_type == 'postgresql':
                return result.get('mfa_secret')
            else:
                return result.get('MFA_Secret')
        else:
            # Covers result is None (no such user) and unexpected row shapes.
            print("Unexpected result format:", result)
            return None
    except Exception as e:
        print("Error retrieving MFA secret:", e)
        return None


def delete_mfa_secret(database_type, cnx, user_id):
    """Clear the user's MFA secret (sets it to NULL). Returns True on success."""
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'UPDATE "Users" SET MFA_Secret = NULL WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "UPDATE Users SET MFA_Secret = NULL WHERE UserID = %s"

    try:
        cursor.execute(query, (user_id,))
        cnx.commit()
        cursor.close()
        return True
    except Exception as e:
        print("Error deleting MFA secret:", e)
        return False


def setup_timezone_info(database_type, cnx, user_id, timezone, hour_pref, date_format):
    """Persist the user's timezone/time/date preferences and mark first login done.

    FirstLogin is set to True (postgres boolean) or 1 (MySQL tinyint).
    Returns True on success, False on error.
    """
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = (
            'UPDATE "Users" SET Timezone = %s, TimeFormat = %s, DateFormat = %s, FirstLogin = %s WHERE UserID = %s'
        )
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = (
            "UPDATE Users SET Timezone = %s, TimeFormat = %s, DateFormat = %s, FirstLogin = %s WHERE UserID = %s"
        )

    try:
        if database_type == "postgresql":
            cursor.execute(query, (timezone, hour_pref, date_format, True, user_id))
        else:
            cursor.execute(query, (timezone, hour_pref, date_format, 1, user_id))
        cnx.commit()
        cursor.close()

        return True
    except Exception as e:
        print("Error setting up time info:", e)
        return False


def get_time_info(database_type, cnx, user_id):
    """Return (timezone, timeformat, dateformat) for the user.

    Returns (None, None, None) when the user is not found. Handles both
    dict rows (either key casing) and tuple rows.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = 'SELECT Timezone, TimeFormat, DateFormat FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT Timezone, TimeFormat, DateFormat FROM Users WHERE UserID = %s"

    cursor.execute(query, (user_id,))
    result = cursor.fetchone()
    cursor.close()

    if result:
        if isinstance(result, dict):
            # Postgres yields lowercase keys, MySQL may preserve casing.
            timezone = result.get('timezone') or result.get('Timezone')
            timeformat = result.get('timeformat') or result.get('TimeFormat')
            dateformat = result.get('dateformat') or result.get('DateFormat')
        else:
            # Tuple order matches the SELECT column order.
            timezone, timeformat, dateformat = result

        return timezone, timeformat, dateformat
    else:
        return None, None, None
def first_login_done(database_type, cnx, user_id):
    """Return True once the user has completed first-login setup.

    Reads Users.FirstLogin; PostgreSQL stores a boolean (True == 1 holds),
    MySQL/MariaDB a tinyint. Returns False for unknown users or on error.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = 'SELECT FirstLogin FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT FirstLogin FROM Users WHERE UserID = %s"

    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
        cursor.close()

        if result is None:
            # Unknown user: report not-done instead of raising inside the
            # subscript below (the original relied on the except for this).
            return False

        if database_type == "postgresql":
            first_login = result[0] if isinstance(result, tuple) else result['firstlogin']
        else:
            first_login = result[0] if isinstance(result, tuple) else result['FirstLogin']
        return first_login == 1
    except Exception as e:
        print("Error fetching first login status:", e)
        return False


def delete_selected_episodes(cnx, database_type, selected_episodes, user_id):
    """Delete the given downloaded episodes for a user: remove the file on
    disk, the DownloadedEpisodes row, and decrement UserStats.EpisodesDownloaded.

    Episodes with no matching download row are skipped. A file that is
    already missing on disk does not abort the cleanup. Returns "success".
    """
    cursor = cnx.cursor()
    for episode_id in selected_episodes:
        # Get the download ID and location from the DownloadedEpisodes table
        query = (
            'SELECT DownloadID, DownloadedLocation '
            'FROM "DownloadedEpisodes" '
            'WHERE EpisodeID = %s AND UserID = %s' if database_type == "postgresql" else
            "SELECT DownloadID, DownloadedLocation "
            "FROM DownloadedEpisodes "
            "WHERE EpisodeID = %s AND UserID = %s"
        )
        cursor.execute(query, (episode_id, user_id))
        result = cursor.fetchone()

        if not result:
            print(f"No matching download found for episode ID {episode_id}")
            continue

        download_id, downloaded_location = result

        # Delete the downloaded file; tolerate an already-deleted file so one
        # stale row cannot abort the whole batch (the original crashed here).
        try:
            os.remove(downloaded_location)
        except FileNotFoundError:
            print(f"Downloaded file already missing: {downloaded_location}")

        # Remove the entry from the DownloadedEpisodes table
        query = (
            'DELETE FROM "DownloadedEpisodes" WHERE DownloadID = %s' if database_type == "postgresql" else
            "DELETE FROM DownloadedEpisodes WHERE DownloadID = %s"
        )
        cursor.execute(query, (download_id,))
        print(f"Removed {cursor.rowcount} entry from the DownloadedEpisodes table.")

        # Update UserStats table to decrement EpisodesDownloaded count
        query = (
            'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded - 1 '
            'WHERE UserID = %s' if database_type == "postgresql" else
            "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s"
        )
        cursor.execute(query, (user_id,))
        # Commit AFTER both the row delete and the stats decrement, so the
        # final iteration's stats update is not left uncommitted (the
        # original committed before the UserStats UPDATE each loop).
        cnx.commit()

    cursor.close()

    return "success"


def search_data(database_type, cnx, search_term, user_id):
    """Search the user's podcast episodes and YouTube videos by title or
    description substring (ILIKE on postgres, LIKE on MySQL/MariaDB).

    Each result row carries podcast metadata plus per-user state flags
    (saved / queued / downloaded, listen duration, completed).

    Returns a list of dicts with lowercase keys, [] when nothing matches,
    or None on query error.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = """
            SELECT
                p.PodcastID as podcastid,
                p.PodcastName as podcastname,
                p.ArtworkURL as artworkurl,
                p.Author as author,
                p.Categories as categories,
                p.Description as description,
                p.EpisodeCount as episodecount,
                p.FeedURL as feedurl,
                p.WebsiteURL as websiteurl,
                p.Explicit as explicit,
                p.UserID as userid,
                p.IsYouTubeChannel as is_youtube,
                COALESCE(e.EpisodeID, y.VideoID) as episodeid,
                COALESCE(e.EpisodeTitle, y.VideoTitle) as episodetitle,
                COALESCE(e.EpisodeDescription, y.VideoDescription) as episodedescription,
                COALESCE(e.EpisodeURL, y.VideoURL) as episodeurl,
                COALESCE(e.EpisodeArtwork, y.ThumbnailURL) as episodeartwork,
                COALESCE(e.EpisodePubDate, y.PublishedAt) as episodepubdate,
                COALESCE(e.EpisodeDuration, y.Duration) as episodeduration,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN y.ListenPosition
                    ELSE h.ListenDuration
                END as listenduration,
                COALESCE(e.Completed, y.Completed) as completed,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN sv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END
                    ELSE
                        CASE WHEN se.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END
                END as saved,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = TRUE THEN TRUE ELSE FALSE END
                    ELSE
                        CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = FALSE THEN TRUE ELSE FALSE END
                END as queued,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN dv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END
                    ELSE
                        CASE WHEN de.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END
                END as downloaded
            FROM "Podcasts" p
            LEFT JOIN (
                SELECT * FROM "Episodes" WHERE EpisodeTitle ILIKE %s OR EpisodeDescription ILIKE %s
            ) e ON p.PodcastID = e.PodcastID
            LEFT JOIN (
                SELECT * FROM "YouTubeVideos" WHERE VideoTitle ILIKE %s OR VideoDescription ILIKE %s
            ) y ON p.PodcastID = y.PodcastID
            LEFT JOIN "UserEpisodeHistory" h ON
                (e.EpisodeID = h.EpisodeID AND h.UserID = %s)
            LEFT JOIN "SavedEpisodes" se ON
                (e.EpisodeID = se.EpisodeID AND se.UserID = %s)
            LEFT JOIN "SavedVideos" sv ON
                (y.VideoID = sv.VideoID AND sv.UserID = %s)
            LEFT JOIN "EpisodeQueue" eq ON
                ((e.EpisodeID = eq.EpisodeID OR y.VideoID = eq.EpisodeID) AND eq.UserID = %s)
            LEFT JOIN "DownloadedEpisodes" de ON
                (e.EpisodeID = de.EpisodeID AND de.UserID = %s)
            LEFT JOIN "DownloadedVideos" dv ON
                (y.VideoID = dv.VideoID AND dv.UserID = %s)
            WHERE p.UserID = %s
            AND (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = """
            SELECT
                p.PodcastID as podcastid,
                p.PodcastName as podcastname,
                p.ArtworkURL as artworkurl,
                p.Author as author,
                p.Categories as categories,
                p.Description as description,
                p.EpisodeCount as episodecount,
                p.FeedURL as feedurl,
                p.WebsiteURL as websiteurl,
                p.Explicit as explicit,
                p.UserID as userid,
                p.IsYouTubeChannel as is_youtube,
                COALESCE(e.EpisodeID, y.VideoID) as episodeid,
                COALESCE(e.EpisodeTitle, y.VideoTitle) as episodetitle,
                COALESCE(e.EpisodeDescription, y.VideoDescription) as episodedescription,
                COALESCE(e.EpisodeURL, y.VideoURL) as episodeurl,
                COALESCE(e.EpisodeArtwork, y.ThumbnailURL) as episodeartwork,
                COALESCE(e.EpisodePubDate, y.PublishedAt) as episodepubdate,
                COALESCE(e.EpisodeDuration, y.Duration) as episodeduration,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN y.ListenPosition
                    ELSE h.ListenDuration
                END as listenduration,
                COALESCE(e.Completed, y.Completed) as completed,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN sv.VideoID IS NOT NULL THEN 1 ELSE 0 END
                    ELSE
                        CASE WHEN se.EpisodeID IS NOT NULL THEN 1 ELSE 0 END
                END as saved,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = 1 THEN 1 ELSE 0 END
                    ELSE
                        CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = 0 THEN 1 ELSE 0 END
                END as queued,
                CASE
                    WHEN y.VideoID IS NOT NULL THEN
                        CASE WHEN dv.VideoID IS NOT NULL THEN 1 ELSE 0 END
                    ELSE
                        CASE WHEN de.EpisodeID IS NOT NULL THEN 1 ELSE 0 END
                END as downloaded
            FROM Podcasts p
            LEFT JOIN (
                SELECT * FROM Episodes WHERE EpisodeTitle LIKE %s OR EpisodeDescription LIKE %s
            ) e ON p.PodcastID = e.PodcastID
            LEFT JOIN (
                SELECT * FROM YouTubeVideos WHERE VideoTitle LIKE %s OR VideoDescription LIKE %s
            ) y ON p.PodcastID = y.PodcastID
            LEFT JOIN UserEpisodeHistory h ON
                (e.EpisodeID = h.EpisodeID AND h.UserID = %s)
            LEFT JOIN SavedEpisodes se ON
                (e.EpisodeID = se.EpisodeID AND se.UserID = %s)
            LEFT JOIN SavedVideos sv ON
                (y.VideoID = sv.VideoID AND sv.UserID = %s)
            LEFT JOIN EpisodeQueue eq ON
                ((e.EpisodeID = eq.EpisodeID OR y.VideoID = eq.EpisodeID) AND eq.UserID = %s)
            LEFT JOIN DownloadedEpisodes de ON
                (e.EpisodeID = de.EpisodeID AND de.UserID = %s)
            LEFT JOIN DownloadedVideos dv ON
                (y.VideoID = dv.VideoID AND dv.UserID = %s)
            WHERE p.UserID = %s
            AND (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
        """

    # Add wildcards for the LIKE/ILIKE clause
    search_term = f"%{search_term}%"

    # We now need 11 parameters: search_term (4x), user_id (7x)
    params = (search_term, search_term, search_term, search_term, user_id, user_id, user_id, user_id, user_id, user_id, user_id)

    try:
        start = time.time()
        cursor.execute(query, params)
        result = cursor.fetchall()
        end = time.time()
        logging.info(f"Query executed in {end - start} seconds.")
        cursor.close()

        if not result:
            return []

        # Convert column names to lowercase for MySQL
        result = lowercase_keys(result)

        # Post-process the results to cast boolean to integer for the 'explicit' field
        if database_type == "postgresql":
            for row in result:
                if 'explicit' in row:
                    row['explicit'] = 1 if row['explicit'] else 0

        # MySQL returns 0/1 tinyints; normalize flag columns to real booleans.
        if database_type != "postgresql":
            bool_fields = ['is_youtube', 'completed', 'saved', 'queued', 'downloaded']
            for row in result:
                for field in bool_fields:
                    if field in row:
                        row[field] = bool(row.get(field, 0))

        return result

    except Exception as e:
        logging.error(f"Error retrieving Podcast Episodes: {e}")
        return None


def queue_pod(database_type, cnx, episode_id, user_id, is_youtube=False):
    """Append an episode (or YouTube video) to the end of the user's queue.

    The new QueuePosition is MAX(existing) + 1 (1 when the queue is empty).
    Returns a success message string, or None on error.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query_get_max_pos = (
            'SELECT MAX(QueuePosition) AS max_pos FROM "EpisodeQueue" '
            'WHERE UserID = %s'
        )
    else:
        cursor = cnx.cursor(dictionary=True)
        query_get_max_pos = (
            "SELECT MAX(QueuePosition) AS max_pos FROM EpisodeQueue "
            "WHERE UserID = %s"
        )

    cursor.execute(query_get_max_pos, (user_id,))
    result = cursor.fetchone()
    # MAX() yields NULL for an empty queue; treat that as position 0.
    max_pos = result['max_pos'] if result['max_pos'] else 0

    # Insert the new item into the queue
    query_queue_pod = (
        'INSERT INTO "EpisodeQueue"(UserID, EpisodeID, QueuePosition, is_youtube) '
        'VALUES (%s, %s, %s, %s)' if database_type == "postgresql" else
        "INSERT INTO EpisodeQueue(UserID, EpisodeID, QueuePosition, is_youtube) "
        "VALUES (%s, %s, %s, %s)"
    )

    new_pos = max_pos + 1
    try:
        start = time.time()
        cursor.execute(query_queue_pod, (user_id, episode_id, new_pos, is_youtube))
        cnx.commit()
        end = time.time()
        print(f"Query executed in {end - start} seconds.")
    except Exception as e:
        print(f"Error queueing {'video' if is_youtube else 'episode'}:", e)
        return None
    return f"{'Video' if is_youtube else 'Episode'} queued successfully."
def reorder_queued_episodes(database_type, cnx, user_id, episode_ids):
    """Rewrite the user's queue order to match episode_ids.

    Each episode in the list is assigned QueuePosition 1..N in list order.
    Returns True on success, False on error.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query_update_position = (
            'UPDATE "EpisodeQueue" SET QueuePosition = %s '
            'WHERE UserID = %s AND EpisodeID = %s'
        )
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query_update_position = (
            "UPDATE EpisodeQueue SET QueuePosition = %s "
            "WHERE UserID = %s AND EpisodeID = %s"
        )

    try:
        start = time.time()

        # Update the position of each episode in the order they appear in the list
        for position, episode_id in enumerate(episode_ids, start=1):
            cursor.execute(query_update_position, (position, user_id, episode_id))

        cnx.commit()  # Commit the changes
        end = time.time()
        print(f"Query executed in {end - start} seconds.")
        return True
    except Exception as e:
        print("Error reordering Podcast Episodes:", e)
        return False


def check_queued(database_type, cnx, episode_id, user_id, is_youtube=False):
    """Return True if the given episode/video is currently in the user's queue.

    The is_youtube flag must match the queue row's is_youtube column, so a
    podcast episode and a video sharing an ID are distinguished.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = """
        SELECT * FROM "EpisodeQueue"
        WHERE EpisodeID = %s AND UserID = %s AND is_youtube = %s
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = """
        SELECT * FROM EpisodeQueue
        WHERE EpisodeID = %s AND UserID = %s AND is_youtube = %s
        """
    cursor.execute(query, (episode_id, user_id, is_youtube))
    result = cursor.fetchone()
    cursor.close()
    return True if result else False


def get_queue_value(result, key, default=None):
    """
    Helper function to extract value from result set.
    It handles both dictionaries and tuples.
    """
    # Keys are matched case-insensitively; tuple rows use a fixed index map
    # that mirrors the SELECT column order of the queue queries below.
    key_lower = key.lower()
    if isinstance(result, dict):
        return result.get(key_lower, default)
    elif isinstance(result, tuple):
        # Define a mapping of field names to their tuple indices for your specific queries
        key_map = {
            "episodeid": 0,
            "queueposition": 1
        }
        index = key_map.get(key_lower)
        return result[index] if index is not None else default
    return default


def remove_queued_pod(database_type, cnx, episode_id, user_id, is_youtube=False):
    """Remove an episode/video from the user's queue and close the gap.

    Steps (order matters):
      1. Look up the queued row (joined against Episodes or YouTubeVideos
         depending on is_youtube) to learn its QueuePosition.
      2. Delete that queue row.
      3. Decrement QueuePosition for every later item of the same kind.

    Returns a status dict, or None if the item was not queued.
    """
    print(f'ep id: {episode_id}')
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        if is_youtube:
            get_queue_data_query = """
            SELECT "EpisodeQueue".EpisodeID, "EpisodeQueue".QueuePosition
            FROM "EpisodeQueue"
            INNER JOIN "YouTubeVideos" ON "EpisodeQueue".EpisodeID = "YouTubeVideos".VideoID
            WHERE "YouTubeVideos".VideoID = %s AND "EpisodeQueue".UserID = %s AND "EpisodeQueue".is_youtube = TRUE
            """
        else:
            get_queue_data_query = """
            SELECT "EpisodeQueue".EpisodeID, "EpisodeQueue".QueuePosition
            FROM "EpisodeQueue"
            INNER JOIN "Episodes" ON "EpisodeQueue".EpisodeID = "Episodes".EpisodeID
            WHERE "Episodes".EpisodeID = %s AND "EpisodeQueue".UserID = %s AND "EpisodeQueue".is_youtube = FALSE
            """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        if is_youtube:
            get_queue_data_query = """
            SELECT EpisodeQueue.EpisodeID, EpisodeQueue.QueuePosition
            FROM EpisodeQueue
            INNER JOIN YouTubeVideos ON EpisodeQueue.EpisodeID = YouTubeVideos.VideoID
            WHERE YouTubeVideos.VideoID = %s AND EpisodeQueue.UserID = %s AND EpisodeQueue.is_youtube = TRUE
            """
        else:
            get_queue_data_query = """
            SELECT EpisodeQueue.EpisodeID, EpisodeQueue.QueuePosition
            FROM EpisodeQueue
            INNER JOIN Episodes ON EpisodeQueue.EpisodeID = Episodes.EpisodeID
            WHERE Episodes.EpisodeID = %s AND EpisodeQueue.UserID = %s AND EpisodeQueue.is_youtube = FALSE
            """

    cursor.execute(get_queue_data_query, (episode_id, user_id))
    queue_data = cursor.fetchone()
    print(f"Queue data: {queue_data}")

    if queue_data is None:
        print(f"No queued {'video' if is_youtube else 'episode'} found with ID {episode_id}")
        cursor.close()
        return None

    # Postgres dict_row yields lowercase keys; MySQL preserves column casing.
    removed_queue_position = queue_data['queueposition'] if database_type == "postgresql" else queue_data['QueuePosition']
    print(f'delete on the way')

    delete_query = (
        'DELETE FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s AND is_youtube = %s' if database_type == "postgresql" else
        "DELETE FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s AND is_youtube = %s"
    )
    cursor.execute(delete_query, (user_id, episode_id, is_youtube))
    affected_rows = cursor.rowcount
    print(f'Rows affected by delete: {affected_rows}')

    if affected_rows == 0:
        print(f"No rows were deleted. UserID: {user_id}, {'VideoID' if is_youtube else 'EpisodeID'}: {episode_id}")
        return {"status": "error", "message": "No matching row found for deletion"}

    print(f'{"video" if is_youtube else "episode"} deleted')
    cnx.commit()

    # Shift every later queue entry of the same kind down by one to keep
    # positions contiguous after the removal.
    update_queue_query = (
        'UPDATE "EpisodeQueue" SET QueuePosition = QueuePosition - 1 WHERE UserID = %s AND QueuePosition > %s AND is_youtube = %s' if database_type == "postgresql" else
        "UPDATE EpisodeQueue SET QueuePosition = QueuePosition - 1 WHERE UserID = %s AND QueuePosition > %s AND is_youtube = %s"
    )
    cursor.execute(update_queue_query, (user_id, removed_queue_position, is_youtube))
    cnx.commit()
    print(f"Successfully removed {'video' if is_youtube else 'episode'} from queue.")
    cursor.close()
    return {"data": "Successfully Removed Episode From Queue"}
def get_queued_episodes(database_type, cnx, user_id):
    """Return the user's full queue, ordered by QueuePosition.

    Podcast episodes (is_youtube = FALSE queue rows) and YouTube videos
    (is_youtube = TRUE rows) are fetched in one UNION ALL; each row carries
    saved/downloaded flags, listen duration, and queue metadata. Keys are
    lowercased and MySQL tinyint flags are converted to booleans.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        get_queued_episodes_query = """
            SELECT * FROM (
                SELECT
                    "Episodes".EpisodeTitle as episodetitle,
                    "Podcasts".PodcastName as podcastname,
                    "Episodes".EpisodePubDate as episodepubdate,
                    "Episodes".EpisodeDescription as episodedescription,
                    "Episodes".EpisodeArtwork as episodeartwork,
                    "Episodes".EpisodeURL as episodeurl,
                    "EpisodeQueue".QueuePosition as queueposition,
                    "Episodes".EpisodeDuration as episodeduration,
                    "EpisodeQueue".QueueDate as queuedate,
                    "UserEpisodeHistory".ListenDuration as listenduration,
                    "Episodes".EpisodeID as episodeid,
                    "Episodes".Completed as completed,
                    CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    TRUE as queued,
                    CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    FALSE as is_youtube
                FROM "EpisodeQueue"
                INNER JOIN "Episodes" ON "EpisodeQueue".EpisodeID = "Episodes".EpisodeID
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "EpisodeQueue".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "EpisodeQueue".UserID = "UserEpisodeHistory".UserID
                LEFT JOIN "SavedEpisodes" ON
                    "EpisodeQueue".EpisodeID = "SavedEpisodes".EpisodeID
                    AND "SavedEpisodes".UserID = %s
                LEFT JOIN "DownloadedEpisodes" ON
                    "EpisodeQueue".EpisodeID = "DownloadedEpisodes".EpisodeID
                    AND "DownloadedEpisodes".UserID = %s
                WHERE "EpisodeQueue".UserID = %s
                AND "EpisodeQueue".is_youtube = FALSE

                UNION ALL

                SELECT
                    "YouTubeVideos".VideoTitle as episodetitle,
                    "Podcasts".PodcastName as podcastname,
                    "YouTubeVideos".PublishedAt as episodepubdate,
                    "YouTubeVideos".VideoDescription as episodedescription,
                    "YouTubeVideos".ThumbnailURL as episodeartwork,
                    "YouTubeVideos".VideoURL as episodeurl,
                    "EpisodeQueue".QueuePosition as queueposition,
                    "YouTubeVideos".Duration as episodeduration,
                    "EpisodeQueue".QueueDate as queuedate,
                    "YouTubeVideos".ListenPosition as listenduration,
                    "YouTubeVideos".VideoID as episodeid,
                    "YouTubeVideos".Completed as completed,
                    CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    TRUE as queued,
                    CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    TRUE as is_youtube
                FROM "EpisodeQueue"
                INNER JOIN "YouTubeVideos" ON "EpisodeQueue".EpisodeID = "YouTubeVideos".VideoID
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "SavedVideos" ON
                    "EpisodeQueue".EpisodeID = "SavedVideos".VideoID
                    AND "SavedVideos".UserID = %s
                LEFT JOIN "DownloadedVideos" ON
                    "EpisodeQueue".EpisodeID = "DownloadedVideos".VideoID
                    AND "DownloadedVideos".UserID = %s
                WHERE "EpisodeQueue".UserID = %s
                AND "EpisodeQueue".is_youtube = TRUE
            ) combined
            ORDER BY queueposition ASC
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        get_queued_episodes_query = """
            SELECT * FROM (
                SELECT
                    Episodes.EpisodeTitle as episodetitle,
                    Podcasts.PodcastName as podcastname,
                    Episodes.EpisodePubDate as episodepubdate,
                    Episodes.EpisodeDescription as episodedescription,
                    Episodes.EpisodeArtwork as episodeartwork,
                    Episodes.EpisodeURL as episodeurl,
                    EpisodeQueue.QueuePosition as queueposition,
                    Episodes.EpisodeDuration as episodeduration,
                    EpisodeQueue.QueueDate as queuedate,
                    UserEpisodeHistory.ListenDuration as listenduration,
                    Episodes.EpisodeID as episodeid,
                    Episodes.Completed as completed,
                    CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    1 as queued,
                    CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
                    0 as is_youtube
                FROM EpisodeQueue
                INNER JOIN Episodes ON EpisodeQueue.EpisodeID = Episodes.EpisodeID
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    EpisodeQueue.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND EpisodeQueue.UserID = UserEpisodeHistory.UserID
                LEFT JOIN SavedEpisodes ON
                    EpisodeQueue.EpisodeID = SavedEpisodes.EpisodeID
                    AND SavedEpisodes.UserID = %s
                LEFT JOIN DownloadedEpisodes ON
                    EpisodeQueue.EpisodeID = DownloadedEpisodes.EpisodeID
                    AND DownloadedEpisodes.UserID = %s
                WHERE EpisodeQueue.UserID = %s
                AND EpisodeQueue.is_youtube = FALSE

                UNION ALL

                SELECT
                    YouTubeVideos.VideoTitle as episodetitle,
                    Podcasts.PodcastName as podcastname,
                    YouTubeVideos.PublishedAt as episodepubdate,
                    YouTubeVideos.VideoDescription as episodedescription,
                    YouTubeVideos.ThumbnailURL as episodeartwork,
                    YouTubeVideos.VideoURL as episodeurl,
                    EpisodeQueue.QueuePosition as queueposition,
                    YouTubeVideos.Duration as episodeduration,
                    EpisodeQueue.QueueDate as queuedate,
                    YouTubeVideos.ListenPosition as listenduration,
                    YouTubeVideos.VideoID as episodeid,
                    YouTubeVideos.Completed as completed,
                    CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    1 as queued,
                    CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
                    1 as is_youtube
                FROM EpisodeQueue
                INNER JOIN YouTubeVideos ON EpisodeQueue.EpisodeID = YouTubeVideos.VideoID
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN SavedVideos ON
                    EpisodeQueue.EpisodeID = SavedVideos.VideoID
                    AND SavedVideos.UserID = %s
                LEFT JOIN DownloadedVideos ON
                    EpisodeQueue.EpisodeID = DownloadedVideos.VideoID
                    AND DownloadedVideos.UserID = %s
                WHERE EpisodeQueue.UserID = %s
                AND EpisodeQueue.is_youtube = TRUE
            ) combined
            ORDER BY queueposition ASC
        """

    # We now need 6 user_id parameters: 3 for each union part
    cursor.execute(get_queued_episodes_query, (user_id, user_id, user_id, user_id, user_id, user_id))
    queued_episodes = cursor.fetchall()
    cursor.close()
    queued_episodes = lowercase_keys(queued_episodes)

    # MySQL returns tinyint flags; normalize them to real booleans.
    if database_type != "postgresql":
        bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
        for episode in queued_episodes:
            for field in bool_fields:
                if field in episode:
                    episode[field] = bool(episode[field])

    return queued_episodes
cnx.cursor() - query = """ - SELECT EXISTS( - SELECT 1 FROM "Episodes" - JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - WHERE "Podcasts".UserID = %s AND "Episodes".EpisodeTitle = %s AND "Episodes".EpisodeURL = %s - ) - """ if database_type == "postgresql" else """ - SELECT EXISTS( - SELECT 1 FROM Episodes - JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - WHERE Podcasts.UserID = %s AND Episodes.EpisodeTitle = %s AND Episodes.EpisodeURL = %s - ) - """ - cursor.execute(query, (user_id, episode_title, episode_url)) - result = cursor.fetchone() - cursor.close() - - # Check if result is a dictionary or a tuple - if isinstance(result, dict): - return result['exists'] == 1 - elif isinstance(result, tuple): - return result[0] == 1 - else: - raise TypeError("Unexpected type for 'result'") - - -def add_shared_episode(database_type, cnx, episode_id, url_key, expiration_date): - cursor = cnx.cursor() - - if database_type == "postgresql": - query = ''' - INSERT INTO "SharedEpisodes" (EpisodeID, UrlKey, ExpirationDate) - VALUES (%s, %s, %s) - ''' - else: # MySQL/MariaDB version - query = ''' - INSERT INTO SharedEpisodes (EpisodeID, UrlKey, ExpirationDate) - VALUES (%s, %s, %s) - ''' - - try: - cursor.execute(query, (episode_id, url_key, expiration_date)) - cnx.commit() # Commit the changes - cursor.close() - return True - except Exception as e: - print(f"Error sharing episode: {e}") - cursor.close() - return False - -def cleanup_old_episodes(cnx, database_type): - """ - Master cleanup function that handles both PeopleEpisodes and SharedEpisodes tables - """ - cleanup_old_people_episodes(cnx, database_type) - cleanup_expired_shared_episodes(cnx, database_type) - -def cleanup_old_people_episodes(cnx, database_type, days=30): - """ - Remove episodes from PeopleEpisodes that are older than the specified number of days - """ - cursor = cnx.cursor() - try: - if database_type == "postgresql": - delete_query = """ - DELETE FROM "PeopleEpisodes" - WHERE 
AddedDate < CURRENT_TIMESTAMP - INTERVAL '%s days' - """ - else: # MySQL or MariaDB - delete_query = """ - DELETE FROM PeopleEpisodes - WHERE AddedDate < DATE_SUB(NOW(), INTERVAL %s DAY) - """ - - cursor.execute(delete_query, (days,)) - deleted_count = cursor.rowcount - print(f"Cleaned up {deleted_count} episodes older than {days} days from PeopleEpisodes") - cnx.commit() - - except Exception as e: - print(f"Error during PeopleEpisodes cleanup: {str(e)}") - cnx.rollback() - finally: - cursor.close() - -def cleanup_expired_shared_episodes(cnx, database_type): - """ - Remove expired episodes from SharedEpisodes based on ExpirationDate - """ - cursor = cnx.cursor() - try: - if database_type == "postgresql": - delete_query = """ - DELETE FROM "SharedEpisodes" - WHERE ExpirationDate < CURRENT_TIMESTAMP - """ - else: # MySQL or MariaDB - delete_query = """ - DELETE FROM SharedEpisodes - WHERE ExpirationDate < NOW() - """ - - cursor.execute(delete_query) - deleted_count = cursor.rowcount - print(f"Cleaned up {deleted_count} expired episodes from SharedEpisodes") - cnx.commit() - - except Exception as e: - print(f"Error during SharedEpisodes cleanup: {str(e)}") - cnx.rollback() - finally: - cursor.close() - - -def build_playlist_query(playlist, database_type): - # Debug the incoming playlist data - print(f"DEBUG - Playlist time filter value: {playlist.get('timefilterhours')}") - print(f"DEBUG - Playlist keys: {list(playlist.keys())}") - - # Check and print the progress threshold values - progress_min = playlist.get('playprogressmin') - progress_max = playlist.get('playprogressmax') - print(f"DEBUG - Progress min value: {progress_min}") - print(f"DEBUG - Progress max value: {progress_max}") - - conditions = [] - params = [] - - # Check if this is a system playlist (owned by user 1) - is_system_playlist = playlist['userid'] == 1 and playlist['issystemplaylist'] - playlist_name = playlist.get('name', '') - - # Special case handling for playlists that need to filter by user 
listening history - needs_user_history = playlist_name in ['Currently Listening', 'Almost Done'] or not is_system_playlist - - # Ensure Fresh Releases has time filter set - if playlist_name == 'Fresh Releases' and playlist.get('timefilterhours') is None: - playlist['timefilterhours'] = 24 - print(f"Setting default 24 hour time filter for Fresh Releases playlist") - - if database_type == "postgresql": - # Special case for playlists that filter by user listening progress - if playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed']: - # Base query for partially played episodes - IMPORTANT: Include all ORDER BY columns in SELECT - query = """ - SELECT DISTINCT e.episodeid, e.episodepubdate - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid - WHERE h.listenduration > 0 - AND h.listenduration < e.episodeduration - AND e.Completed = FALSE - AND e.episodeduration > 0 - """ - params = [] - - # Add progress min filter if specified - this drives the Almost Done functionality - if progress_min is not None: - min_decimal = float(progress_min) / 100.0 - # Use %s parameter placeholder for safety - query += ' AND (h.listenduration::float / e.episodeduration::float) >= %s' - params.append(min_decimal) - print(f"Adding progress min filter: {min_decimal} ({progress_min}% complete)") - - # Add progress max filter if specified - if progress_max is not None: - max_decimal = float(progress_max) / 100.0 - query += ' AND (h.listenduration::float / e.episodeduration::float) <= %s' - params.append(max_decimal) - print(f"Adding progress max filter: {max_decimal}") - - print(f"Special query for in-progress playlist with filters") - - # Add sort order - if playlist['sortorder']: - sort_mapping = { - 'date_asc': 'e.episodepubdate ASC', - 'date_desc': 'e.episodepubdate DESC', - 'duration_asc': 'e.episodeduration ASC', - 'duration_desc': 'e.episodeduration DESC', - 
'listen_progress': '(h.listenduration::float / e.episodeduration::float) DESC', - 'completion': '(h.listenduration::float / e.episodeduration::float) DESC' - } - order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC') - query += f" ORDER BY {order_by}" - - else: - # Basic query structure depends on playlist type - if is_system_playlist: - if needs_user_history: - # System playlist that needs user listening history (e.g., Currently Listening) - query = """ - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s - JOIN "Users" u ON u.UserID = %s - WHERE 1=1 - """ - params.extend([playlist['userid'], playlist['userid']]) - else: - # System playlist that doesn't need user history filtering (e.g., Fresh Releases) - query = """ - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid - JOIN "Users" u ON u.UserID = %s - WHERE 1=1 - """ - params.extend([playlist['userid']]) # Only needed for timezone - - print(f"System playlist detected - showing all podcasts") - else: - # User-specific playlist - only show user's podcasts - query = """ - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s - JOIN "Users" u ON u.UserID = %s - WHERE p.UserID = %s - """ - params.extend([playlist['userid'], playlist['userid'], playlist['userid']]) - print(f"User playlist detected - only showing podcasts for user {playlist['userid']}") - - # Podcast filter for PostgreSQL - if playlist['podcastids']: - conditions.append('e.podcastid = ANY(%s)') - params.append(playlist['podcastids']) - - # Duration filters - if playlist['minduration'] is not None: - conditions.append('e.episodeduration >= %s') - params.append(playlist['minduration']) - if 
playlist['maxduration'] is not None: - conditions.append('e.episodeduration <= %s') - params.append(playlist['maxduration']) - - # Play state filters with progress - play_state_conditions = [] - - if playlist['includeunplayed']: - play_state_conditions.append('h.listenduration IS NULL') - - if playlist['includepartiallyplayed']: - # Base condition: episodes with some progress but not fully listened - partial_condition = '(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.Completed = FALSE)' - - # Add progress range conditions if specified - if playlist.get('playprogressmin') is not None: - min_decimal = float(playlist["playprogressmin"]) / 100.0 - partial_condition += f' AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {min_decimal}' - - if playlist.get('playprogressmax') is not None: - max_decimal = float(playlist["playprogressmax"]) / 100.0 - partial_condition += f' AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {max_decimal}' - - play_state_conditions.append(partial_condition) - - if playlist['includeplayed']: - play_state_conditions.append('h.listenduration >= e.episodeduration') - - if play_state_conditions: - conditions.append(f"({' OR '.join(play_state_conditions)})") - - # Time filter for PostgreSQL with timezone support - if playlist.get('timefilterhours') is not None: - print(f"Applying time filter of {playlist['timefilterhours']} hours with timezone support") - conditions.append(''' - e.episodepubdate AT TIME ZONE 'UTC' - AT TIME ZONE COALESCE(u.TimeZone, 'UTC') > - (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - AT TIME ZONE COALESCE(u.TimeZone, 'UTC') - INTERVAL '%s hours') - ''') - params.append(playlist['timefilterhours']) - - # Add all conditions - if conditions: - query += " AND " + " AND ".join(conditions) - - # Sorting for PostgreSQL - sort_mapping = { - 'date_asc': 'e.episodepubdate ASC', - 'date_desc': 'e.episodepubdate DESC', - 'duration_asc': 'e.episodeduration ASC', - 'duration_desc': 
'e.episodeduration DESC', - 'listen_progress': '(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC', - 'completion': 'COALESCE(h.listenduration::float / NULLIF(e.episodeduration, 0), 0) DESC' - } - - order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC') - if playlist['groupbypodcast']: - order_by = f'e.podcastid, {order_by}' - - query += f" ORDER BY {order_by}" - - else: # MySQL version - # Check for partially played episodes with progress threshold (Almost Done-like functionality) - if playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed'] and playlist.get('playprogressmin') is not None and float(playlist.get('playprogressmin')) >= 75.0: - # This is the "Almost Done" pattern - episodes that are 75%+ complete but not finished - query = """ - SELECT DISTINCT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid - WHERE h.listenduration > 0 - AND h.listenduration < e.episodeduration - AND e.Completed = 0 - AND (h.listenduration / NULLIF(e.episodeduration, 0)) >= %s - """ - min_decimal = float(playlist["playprogressmin"]) / 100.0 - params = [min_decimal] - - # Add progress max constraint if specified - if playlist.get('playprogressmax') is not None: - max_decimal = float(playlist["playprogressmax"]) / 100.0 - query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= %s' - params.append(max_decimal) - - print(f"Special query for playlist with high progress threshold ({playlist.get('playprogressmin')}%+)") - - # Check for partially played episodes without progress threshold (Currently Listening-like functionality) - elif playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed'] and (playlist.get('playprogressmin') is None or float(playlist.get('playprogressmin')) < 75.0): - # This is the "Currently Listening" pattern - any episode that's 
started but not finished - query = """ - SELECT DISTINCT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid - WHERE h.listenduration > 0 - AND h.listenduration < e.episodeduration - AND e.Completed = 0 - """ - params = [] - - # Add progress min constraint if specified - if playlist.get('playprogressmin') is not None: - min_decimal = float(playlist["playprogressmin"]) / 100.0 - query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) >= %s' - params.append(min_decimal) - - # Add progress max constraint if specified - if playlist.get('playprogressmax') is not None: - max_decimal = float(playlist["playprogressmax"]) / 100.0 - query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= %s' - params.append(max_decimal) - - print(f"Special query for playlist with in-progress episodes") - - else: - # Basic query structure depends on playlist type - if is_system_playlist: - if needs_user_history: - # System playlist that needs user listening history (e.g., Currently Listening) - query = """ - SELECT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid AND h.userid = %s - JOIN Users u ON u.UserID = %s - WHERE 1=1 - """ - params.extend([playlist['userid'], playlist['userid']]) - else: - # System playlist that doesn't need user history filtering (e.g., Fresh Releases) - query = """ - SELECT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid - JOIN Users u ON u.UserID = %s - WHERE 1=1 - """ - params.extend([playlist['userid']]) # Only needed for timezone - - print(f"System playlist detected - showing all podcasts") - else: - # User-specific playlist - only show user's podcasts - query = """ - SELECT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - LEFT JOIN UserEpisodeHistory h 
ON e.episodeid = h.episodeid AND h.userid = %s - JOIN Users u ON u.UserID = %s - WHERE p.UserID = %s - """ - params.extend([playlist['userid'], playlist['userid'], playlist['userid']]) - print(f"User playlist detected - only showing podcasts for user {playlist['userid']}") - - # Podcast filter for MySQL - if playlist['podcastids']: - # Convert the PostgreSQL array to a list of integers for MySQL - if isinstance(playlist['podcastids'], list): - podcast_ids = playlist['podcastids'] - else: - # If it's a string representation of a list - import json - try: - podcast_ids = json.loads(playlist['podcastids']) - except: - # Fallback for PostgreSQL array string format like '{1,2,3}' - podcast_ids = [int(id.strip()) for id in playlist['podcastids'].strip('{}').split(',') if id.strip()] - - if len(podcast_ids) == 1: - # Simple equality for a single podcast - conditions.append('e.podcastid = %s') - params.append(podcast_ids[0]) - else: - # IN clause for multiple podcasts - placeholders = ', '.join(['%s'] * len(podcast_ids)) - conditions.append(f'e.podcastid IN ({placeholders})') - params.extend(podcast_ids) - - # Duration filters - if playlist['minduration'] is not None: - conditions.append('e.episodeduration >= %s') - params.append(playlist['minduration']) - if playlist['maxduration'] is not None: - conditions.append('e.episodeduration <= %s') - params.append(playlist['maxduration']) - - # Play state filters with progress - play_state_conditions = [] - - if playlist['includeunplayed']: - play_state_conditions.append('h.listenduration IS NULL') - - if playlist['includepartiallyplayed']: - # Base condition: episodes with some progress but not fully listened - partial_condition = '(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.Completed = 0)' - - # Add progress range conditions if specified - if playlist.get('playprogressmin') is not None: - min_decimal = float(playlist["playprogressmin"]) / 100.0 - partial_condition += f' AND (h.listenduration / 
NULLIF(e.episodeduration, 0)) >= {min_decimal}' - - if playlist.get('playprogressmax') is not None: - max_decimal = float(playlist["playprogressmax"]) / 100.0 - partial_condition += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= {max_decimal}' - - play_state_conditions.append(partial_condition) - - if playlist['includeplayed']: - play_state_conditions.append('h.listenduration >= e.episodeduration') - - if play_state_conditions: - conditions.append(f"({' OR '.join(play_state_conditions)})") - - # Time filter for MySQL with timezone support - if playlist.get('timefilterhours') is not None: - print(f"Applying time filter of {playlist['timefilterhours']} hours with timezone support") - conditions.append(''' - CONVERT_TZ(e.episodepubdate, 'UTC', COALESCE(u.TimeZone, 'UTC')) > - DATE_SUB(CONVERT_TZ(NOW(), 'UTC', COALESCE(u.TimeZone, 'UTC')), INTERVAL %s HOUR) - ''') - params.append(playlist['timefilterhours']) - - # Add all conditions - if conditions: - query += " AND " + " AND ".join(conditions) - - # Sorting for MySQL - sort_mapping = { - 'date_asc': 'e.episodepubdate ASC', - 'date_desc': 'e.episodepubdate DESC', - 'duration_asc': 'e.episodeduration ASC', - 'duration_desc': 'e.episodeduration DESC', - 'listen_progress': '(COALESCE(h.listenduration, 0) / NULLIF(e.episodeduration, 0)) DESC', - 'completion': 'COALESCE(h.listenduration / NULLIF(e.episodeduration, 0), 0) DESC' - } - - order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC') - if playlist['groupbypodcast']: - order_by = f'e.podcastid, {order_by}' - - query += f" ORDER BY {order_by}" - - # Add limit (same for both databases) - if playlist['maxepisodes']: - query += " LIMIT %s" - params.append(playlist['maxepisodes']) - - return query, params - -def update_fresh_releases_playlist(cnx, database_type): - """ - Special function to update the Fresh Releases playlist for all users - considering their individual timezones. 
- """ - cursor = cnx.cursor() - try: - # First, identify the Fresh Releases playlist ID - if database_type == "postgresql": - cursor.execute(""" - SELECT PlaylistID - FROM "Playlists" - WHERE Name = 'Fresh Releases' AND IsSystemPlaylist = TRUE - """) - else: # MySQL - cursor.execute(""" - SELECT PlaylistID - FROM Playlists - WHERE Name = 'Fresh Releases' AND IsSystemPlaylist = 1 - """) - - playlist_result = cursor.fetchone() - if not playlist_result: - raise Exception("Fresh Releases playlist not found in system") - - # Handle both tuple and dict results - if isinstance(playlist_result, tuple): - playlist_id = playlist_result[0] - else: # dict - playlist_id = playlist_result["playlistid"] - - print(f"Updating Fresh Releases playlist (ID: {playlist_id})") - - # Clear existing contents from the playlist - if database_type == "postgresql": - cursor.execute('DELETE FROM "PlaylistContents" WHERE playlistid = %s', (playlist_id,)) - else: # MySQL - cursor.execute('DELETE FROM PlaylistContents WHERE playlistid = %s', (playlist_id,)) - - # Get all users and their timezones - if database_type == "postgresql": - cursor.execute('SELECT UserID, TimeZone FROM "Users"') - else: # MySQL - cursor.execute('SELECT UserID, TimeZone FROM Users') - - users = cursor.fetchall() - added_episodes = set() # Track episodes we've already added to avoid duplicates - position = 0 # For ordering episodes in the playlist - - # Process each user - for user in users: - # Handle both tuple and dict results for user data - if isinstance(user, tuple): - user_id = user[0] - timezone = user[1] or 'UTC' - else: # dict - user_id = user["userid"] - timezone = user["timezone"] or 'UTC' - - print(f"Processing user {user_id} with timezone {timezone}") - - # Get episodes from last 24 hours based on user's timezone - if database_type == "postgresql": - query = """ - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE e.episodepubdate AT TIME ZONE 'UTC' - AT TIME ZONE 
%s > - (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' - AT TIME ZONE %s - INTERVAL '24 hours') - ORDER BY e.episodepubdate DESC - """ - cursor.execute(query, (timezone, timezone)) - else: # MySQL - query = """ - SELECT e.episodeid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - WHERE CONVERT_TZ(e.episodepubdate, 'UTC', %s) > - DATE_SUB(CONVERT_TZ(NOW(), 'UTC', %s), INTERVAL 24 HOUR) - ORDER BY e.episodepubdate DESC - """ - cursor.execute(query, (timezone, timezone)) - - recent_episodes = cursor.fetchall() - print(f"Found {len(recent_episodes)} recent episodes for user {user_id}") - - # Add episodes to playlist if not already added - for episode in recent_episodes: - # Handle both tuple and dict results for episode data - if isinstance(episode, tuple): - episode_id = episode[0] - else: # dict - episode_id = episode["episodeid"] - - if episode_id not in added_episodes: - if database_type == "postgresql": - cursor.execute(""" - INSERT INTO "PlaylistContents" (playlistid, episodeid, position) - VALUES (%s, %s, %s) - """, (playlist_id, episode_id, position)) - else: # MySQL - cursor.execute(""" - INSERT INTO PlaylistContents (playlistid, episodeid, position) - VALUES (%s, %s, %s) - """, (playlist_id, episode_id, position)) - - added_episodes.add(episode_id) - position += 1 - - # Update LastUpdated timestamp - if database_type == "postgresql": - cursor.execute(""" - UPDATE "Playlists" - SET lastupdated = CURRENT_TIMESTAMP - WHERE playlistid = %s - """, (playlist_id,)) - else: # MySQL - cursor.execute(""" - UPDATE Playlists - SET lastupdated = CURRENT_TIMESTAMP - WHERE playlistid = %s - """, (playlist_id,)) - - cnx.commit() - print(f"Successfully updated Fresh Releases playlist with {len(added_episodes)} unique episodes") - - except Exception as e: - print(f"ERROR updating Fresh Releases playlist: {str(e)}") - cnx.rollback() - raise - finally: - cursor.close() - - -def update_playlist_contents(cnx, database_type, playlist): - cursor = cnx.cursor() - try: - 
print(f"\n======= UPDATE PLAYLIST: {playlist['name']} (ID: {playlist['playlistid']}) =======") - - # Clear existing contents - database specific - if database_type == "postgresql": - cursor.execute('DELETE FROM "PlaylistContents" WHERE playlistid = %s', - (playlist['playlistid'],)) - else: # MySQL - # For MySQL, add retry logic to handle deadlocks - max_retries = 3 - retry_count = 0 - - while retry_count < max_retries: - try: - # Start a fresh transaction for each attempt - cnx.rollback() # Clear any previous transaction state - - cursor.execute('DELETE FROM PlaylistContents WHERE playlistid = %s', - (playlist['playlistid'],)) - break # Exit the retry loop if successful - except mysql.connector.errors.InternalError as e: - if "Deadlock" in str(e) and retry_count < max_retries - 1: - # If it's a deadlock and we have retries left - retry_count += 1 - print(f"Deadlock detected, retrying operation (attempt {retry_count}/{max_retries})") - # Add a small delay before retrying to reduce contention - import time - time.sleep(0.5 * retry_count) # Increasing backoff - else: - # Either not a deadlock or we've exhausted retries - raise - - print(f"Cleared existing contents for playlist {playlist['playlistid']}") - - # Build and execute query - query, params = build_playlist_query(playlist, database_type) - - # Try to create a debug query with params substituted - debug_query = query - debug_params = list(params) # Make a copy - - try: - for i, param in enumerate(debug_params): - placeholder = "%s" - if param is None: - replacement = "NULL" - elif isinstance(param, list): - if database_type == "postgresql": - replacement = f"ARRAY[{','.join(map(str, param))}]" - else: # MySQL - replacement = f"({','.join(map(str, param))})" - elif isinstance(param, str): - replacement = f"'{param}'" - else: - replacement = str(param) - - debug_query = debug_query.replace(placeholder, replacement, 1) - - print(f"DEBUG QUERY: {debug_query}") - except Exception as e: - print(f"Error creating debug 
query: {e}") - - # First, let's check if there are any episodes at all for this user - if database_type == "postgresql": - basic_check_query = f""" - SELECT COUNT(*) FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE p.UserID = {playlist['userid']} - """ - else: # MySQL - basic_check_query = f""" - SELECT COUNT(*) FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - WHERE p.UserID = {playlist['userid']} - """ - cursor.execute(basic_check_query) - # Handle both dictionary and tuple result formats - result = cursor.fetchone() - if isinstance(result, dict): - # Dictionary format - use first key in the dict - total_episodes = result[list(result.keys())[0]] - else: - # Tuple format - use first element - total_episodes = result[0] - - print(f"Total episodes available for user {playlist['userid']}: {total_episodes}") - - # Now execute the actual filtered query - cursor.execute(query, params) - episodes = cursor.fetchall() - episode_count = len(episodes) - print(f"Found {episode_count} episodes matching criteria for playlist {playlist['playlistid']}") - - # If we found episodes, show some details - if episode_count > 0: - # Handle both tuple and dict format episodes - episode_ids = [] - for ep in episodes[:5]: - if isinstance(ep, dict): - episode_ids.append(ep.get('episodeid')) - else: - episode_ids.append(ep[0]) - - print(f"First few episode IDs: {episode_ids}") - - # Get details for the first episode - if episode_count > 0: - if isinstance(episodes[0], dict): - first_ep_id = episodes[0].get('episodeid') - else: - first_ep_id = episodes[0][0] - - if database_type == "postgresql": - cursor.execute(""" - SELECT e.episodeid, e.episodetitle, e.episodeduration, - h.listenduration, p.podcastid, p.podcastname, p.userid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s - WHERE e.episodeid = %s - """, (playlist['userid'], first_ep_id)) - else: # 
MySQL - cursor.execute(""" - SELECT e.episodeid, e.episodetitle, e.episodeduration, - h.listenduration, p.podcastid, p.podcastname, p.userid - FROM Episodes e - JOIN Podcasts p ON e.podcastid = p.podcastid - LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid AND h.userid = %s - WHERE e.episodeid = %s - """, (playlist['userid'], first_ep_id)) - - ep_details = cursor.fetchone() - print(f"First episode details: {ep_details}") - - # Insert episodes into playlist - for position, episode in enumerate(episodes): - if isinstance(episode, dict): - episode_id = episode.get('episodeid') - else: - episode_id = episode[0] - - if database_type == "postgresql": - cursor.execute(""" - INSERT INTO "PlaylistContents" (playlistid, episodeid, position) - VALUES (%s, %s, %s) - """, (playlist['playlistid'], episode_id, position)) - else: # MySQL - cursor.execute(""" - INSERT INTO PlaylistContents (playlistid, episodeid, position) - VALUES (%s, %s, %s) - """, (playlist['playlistid'], episode_id, position)) - - # Update LastUpdated timestamp - if database_type == "postgresql": - cursor.execute(""" - UPDATE "Playlists" - SET lastupdated = CURRENT_TIMESTAMP - WHERE playlistid = %s - """, (playlist['playlistid'],)) - else: # MySQL - cursor.execute(""" - UPDATE Playlists - SET lastupdated = CURRENT_TIMESTAMP - WHERE playlistid = %s - """, (playlist['playlistid'],)) - - cnx.commit() - print(f"Successfully updated playlist {playlist['playlistid']} with {episode_count} episodes") - - except Exception as e: - print(f"ERROR updating playlist {playlist['name']}: {str(e)}") - import traceback - print(traceback.format_exc()) - cnx.rollback() - raise - finally: - cursor.close() - - -def update_all_playlists(cnx, database_type): - """ - Update all playlists based on their rules - """ - cursor = cnx.cursor() - try: - print("\n=================== PLAYLIST UPDATE STARTING ===================") - print("Starting to fetch all playlists") - - if database_type == "postgresql": - cursor.execute(''' - 
SELECT - playlistid, userid, name, description, issystemplaylist, - podcastids, includeunplayed, includepartiallyplayed, - includeplayed, minduration, maxduration, sortorder, - groupbypodcast, maxepisodes, playprogressmin, - playprogressmax, timefilterhours - FROM "Playlists" - ''') - else: # MySQL - cursor.execute(''' - SELECT - PlaylistID, UserID, Name, Description, IsSystemPlaylist, - PodcastIDs, IncludeUnplayed, IncludePartiallyPlayed, - IncludePlayed, MinDuration, MaxDuration, SortOrder, - GroupByPodcast, MaxEpisodes, PlayProgressMin, - PlayProgressMax, TimeFilterHours - FROM Playlists - ''') - - columns = [desc[0].lower() for desc in cursor.description] - print(f"Playlist columns: {columns}") - playlists = cursor.fetchall() - total_playlists = len(playlists) - print(f"Found {total_playlists} playlists to update") - - # Let's print info about users and their podcasts - if database_type == "postgresql": - cursor.execute(""" - SELECT userid, COUNT(DISTINCT podcastid) as podcast_count - FROM "Podcasts" - GROUP BY userid - """) - else: # MySQL - cursor.execute(""" - SELECT UserID, COUNT(DISTINCT PodcastID) as podcast_count - FROM Podcasts - GROUP BY UserID - """) - - user_podcast_counts = cursor.fetchall() - print(f"User podcast counts: {user_podcast_counts}") - - if database_type == "postgresql": - cursor.execute(""" - SELECT p.userid, p.podcastid, COUNT(e.episodeid) as episode_count - FROM "Podcasts" p - JOIN "Episodes" e ON p.podcastid = e.podcastid - GROUP BY p.userid, p.podcastid - ORDER BY p.userid, p.podcastid - """) - else: # MySQL - cursor.execute(""" - SELECT p.UserID, p.PodcastID, COUNT(e.EpisodeID) as episode_count - FROM Podcasts p - JOIN Episodes e ON p.PodcastID = e.PodcastID - GROUP BY p.UserID, p.PodcastID - ORDER BY p.UserID, p.PodcastID - """) - - podcast_episode_counts = cursor.fetchall() - print(f"First few podcast episode counts: {podcast_episode_counts[:5]}") - - # Handle Fresh Releases separately - update_fresh_releases_playlist(cnx, 
database_type) - - for idx, playlist in enumerate(playlists, 1): - if isinstance(playlist, tuple): - playlist_dict = dict(zip(columns, playlist)) - print(f"DEBUG - Playlist dict keys: {list(playlist_dict.keys())}") - print(f"DEBUG - Time filter value: {playlist_dict.get('timefilterhours')}") - else: - # If it's already a dict, we need to ensure keys are lowercase - playlist_dict = {k.lower(): v for k, v in playlist.items()} - print(f"DEBUG - Playlist dict keys: {list(playlist_dict.keys())}") - print(f"DEBUG - Time filter value: {playlist_dict.get('timefilterhours')}") - - # Ensure timefilterhours is properly set - if 'timefilterhours' not in playlist_dict and 'TimeFilterHours' in playlist_dict: - playlist_dict['timefilterhours'] = playlist_dict['TimeFilterHours'] - - # Skip Fresh Releases as it's handled separately - if playlist_dict.get('name') == 'Fresh Releases' and playlist_dict.get('issystemplaylist', playlist_dict.get('issystemplaylist', False)): - print(f"Skipping Fresh Releases playlist (ID: {playlist_dict.get('playlistid')}) as it's handled separately") - continue - - print(f"\nProcessing playlist {idx}/{total_playlists}: {playlist_dict.get('name')} (ID: {playlist_dict.get('playlistid')})") - print(f"UserID: {playlist_dict.get('userid')}") - - try: - update_playlist_contents(cnx, database_type, playlist_dict) - print(f"Successfully completed playlist {idx}/{total_playlists}") - except Exception as e: - print(f"Error updating playlist {idx}/{total_playlists} ID {playlist_dict.get('playlistid')}: {str(e)}") - continue - - print(f"Finished processing all {total_playlists} playlists") - print("=============== PLAYLIST UPDATE COMPLETE ===============\n") - cnx.commit() - - except Exception as e: - print(f"Error in update_all_playlists: {str(e)}") - if hasattr(e, '__traceback__'): - import traceback - print(traceback.format_exc()) - cnx.rollback() - finally: - cursor.close() - -def create_playlist(cnx, database_type, playlist_data): - """ - Create a new playlist 
and return its ID - """ - cursor = cnx.cursor() - try: - logging.info(f"Attempting to create playlist with data: {playlist_data}") - min_duration = playlist_data.min_duration * 60 if playlist_data.min_duration is not None else None - max_duration = playlist_data.max_duration * 60 if playlist_data.max_duration is not None else None - - # Convert podcast_ids list to appropriate format based on database type - if database_type == "postgresql": - podcast_ids = playlist_data.podcast_ids # PostgreSQL can handle list directly - else: # MySQL - convert to JSON string - import json - # Always ensure podcast_ids is a list before processing - if playlist_data.podcast_ids is None: - podcast_ids = json.dumps([]) - elif isinstance(playlist_data.podcast_ids, (list, tuple)): - podcast_ids = json.dumps(list(playlist_data.podcast_ids)) - else: - # Handle single value case - podcast_ids = json.dumps([playlist_data.podcast_ids]) - - # Create tuple of values for insert and log them - insert_values = ( - playlist_data.user_id, - playlist_data.name, - playlist_data.description, - podcast_ids, - playlist_data.include_unplayed, - playlist_data.include_partially_played, - playlist_data.include_played, - min_duration, - max_duration, - playlist_data.sort_order, - playlist_data.group_by_podcast, - playlist_data.max_episodes, - playlist_data.icon_name, - playlist_data.play_progress_min, - playlist_data.play_progress_max, - playlist_data.time_filter_hours - ) - logging.info(f"Insert values: {insert_values}") - - try: - if database_type == "postgresql": - cursor.execute(""" - INSERT INTO "Playlists" ( - UserID, - Name, - Description, - IsSystemPlaylist, - PodcastIDs, - IncludeUnplayed, - IncludePartiallyPlayed, - IncludePlayed, - MinDuration, - MaxDuration, - SortOrder, - GroupByPodcast, - MaxEpisodes, - IconName, - PlayProgressMin, - PlayProgressMax, - TimeFilterHours - ) VALUES ( - %s, %s, %s, FALSE, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s - ) RETURNING PlaylistID; - """, 
insert_values) - - try: - result = cursor.fetchone() - logging.info(f"Insert result: {result}") - if result is None: - raise Exception("No playlist ID returned from insert") - # Handle both dict and tuple results - if isinstance(result, dict): - playlist_id = result['playlistid'] - else: - playlist_id = result[0] - cnx.commit() - - # Get the newly created playlist details to update it - # Make sure podcast_ids is always a list for update_playlist_contents - update_podcast_ids = playlist_data.podcast_ids - if update_podcast_ids is None: - update_podcast_ids = [] - elif not isinstance(update_podcast_ids, (list, tuple)): - update_podcast_ids = [update_podcast_ids] - - playlist_dict = { - 'playlistid': playlist_id, - 'userid': playlist_data.user_id, - 'name': playlist_data.name, - 'description': playlist_data.description, - 'issystemplaylist': False, - 'podcastids': update_podcast_ids, - 'includeunplayed': playlist_data.include_unplayed, - 'includepartiallyplayed': playlist_data.include_partially_played, - 'includeplayed': playlist_data.include_played, - 'minduration': min_duration, - 'maxduration': max_duration, - 'sortorder': playlist_data.sort_order, - 'groupbypodcast': playlist_data.group_by_podcast, - 'maxepisodes': playlist_data.max_episodes, - 'playprogressmin': playlist_data.play_progress_min, - 'playprogressmax': playlist_data.play_progress_max, - 'timefilterhours': playlist_data.time_filter_hours - } - - # Update the playlist contents immediately - update_playlist_contents(cnx, database_type, playlist_dict) - - return playlist_id - except Exception as fetch_e: - logging.error(f"Error fetching result: {fetch_e}") - raise - - else: # MySQL - cursor.execute(""" - INSERT INTO Playlists ( - UserID, - Name, - Description, - IsSystemPlaylist, - PodcastIDs, - IncludeUnplayed, - IncludePartiallyPlayed, - IncludePlayed, - MinDuration, - MaxDuration, - SortOrder, - GroupByPodcast, - MaxEpisodes, - IconName, - PlayProgressMin, - PlayProgressMax, - TimeFilterHours - ) 
VALUES ( - %s, %s, %s, FALSE, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s - ); - """, insert_values) - - # For MySQL, we need to get the last inserted ID - playlist_id = cursor.lastrowid - if playlist_id is None: - raise Exception("No playlist ID returned from insert") - cnx.commit() - - # Get the newly created playlist details to update it - # Make sure podcast_ids is always a list for update_playlist_contents - update_podcast_ids = playlist_data.podcast_ids - if update_podcast_ids is None: - update_podcast_ids = [] - elif not isinstance(update_podcast_ids, (list, tuple)): - update_podcast_ids = [update_podcast_ids] - - playlist_dict = { - 'playlistid': playlist_id, - 'userid': playlist_data.user_id, - 'name': playlist_data.name, - 'description': playlist_data.description, - 'issystemplaylist': False, - 'podcastids': update_podcast_ids, - 'includeunplayed': playlist_data.include_unplayed, - 'includepartiallyplayed': playlist_data.include_partially_played, - 'includeplayed': playlist_data.include_played, - 'minduration': min_duration, - 'maxduration': max_duration, - 'sortorder': playlist_data.sort_order, - 'groupbypodcast': playlist_data.group_by_podcast, - 'maxepisodes': playlist_data.max_episodes, - 'playprogressmin': playlist_data.play_progress_min, - 'playprogressmax': playlist_data.play_progress_max, - 'timefilterhours': playlist_data.time_filter_hours - } - - # Update the playlist contents immediately - update_playlist_contents(cnx, database_type, playlist_dict) - - return playlist_id - - except Exception as sql_e: - logging.error(f"SQL execution error: {sql_e}") - if hasattr(sql_e, 'pgerror'): - logging.error(f"PG Error: {sql_e.pgerror}") - if hasattr(sql_e, 'diag'): - logging.error(f"Diagnostics: {sql_e.diag.message_detail}") - raise - - except Exception as e: - cnx.rollback() - logging.error(f"Detailed error creating playlist: {str(e)}") - logging.error(f"Error type: {type(e)}") - logging.error(f"Error args: {getattr(e, 'args', None)}") - raise 
Exception(f"Failed to create playlist: {str(e)}\nPlaylist data: {playlist_data}") - finally: - cursor.close() - -def delete_playlist(cnx, database_type, user_id, playlist_id): - """ - Delete a playlist if it belongs to the user and is not a system playlist - """ - cursor = cnx.cursor() - try: - # Check if playlist exists and belongs to user - if database_type == "postgresql": - cursor.execute(""" - SELECT IsSystemPlaylist, UserID - FROM "Playlists" - WHERE PlaylistID = %s - """, (playlist_id,)) - else: # MySQL - cursor.execute(""" - SELECT IsSystemPlaylist, UserID - FROM Playlists - WHERE PlaylistID = %s - """, (playlist_id,)) - - result = cursor.fetchone() - if not result: - raise Exception("Playlist not found") - - # Handle different result formats (tuple vs dict) - if isinstance(result, tuple): - is_system = result[0] - playlist_user_id = result[1] - else: - # For dict results, check for both capitalized and lowercase keys - if 'issystemplaylist' in result: - is_system = result['issystemplaylist'] - else: - is_system = result['IsSystemPlaylist'] - - if 'userid' in result: - playlist_user_id = result['userid'] - else: - playlist_user_id = result['UserID'] - - if is_system: - raise Exception("Cannot delete system playlists") - if playlist_user_id != user_id: - raise Exception("Unauthorized to delete this playlist") - - # Delete the playlist - if database_type == "postgresql": - cursor.execute(""" - DELETE FROM "Playlists" - WHERE PlaylistID = %s - """, (playlist_id,)) - else: # MySQL - cursor.execute(""" - DELETE FROM Playlists - WHERE PlaylistID = %s - """, (playlist_id,)) - - cnx.commit() - - except Exception as e: - cnx.rollback() - raise Exception(f"Failed to delete playlist: {str(e)}") - finally: - cursor.close() - -def normalize_playlist_data(playlist_record): - """Normalize playlist data regardless of whether it's a tuple or dict.""" - if isinstance(playlist_record, tuple): - result = { - 'playlist_id': playlist_record[0], - 'user_id': playlist_record[1], - 
'name': playlist_record[2], - 'description': playlist_record[3], - 'is_system_playlist': playlist_record[4], - 'podcast_ids': playlist_record[5], - 'include_unplayed': playlist_record[6], - 'include_partially_played': playlist_record[7], - 'include_played': playlist_record[8], - 'min_duration': playlist_record[9], - 'max_duration': playlist_record[10], - 'sort_order': playlist_record[11], - 'group_by_podcast': playlist_record[12], - 'max_episodes': playlist_record[13], - 'last_updated': playlist_record[14], - 'created': playlist_record[15], - 'icon_name': playlist_record[16], - 'episode_count': playlist_record[17] - } - else: - result = { - 'playlist_id': playlist_record['playlistid'], - 'user_id': playlist_record['userid'], - 'name': playlist_record['name'], - 'description': playlist_record['description'], - 'is_system_playlist': playlist_record['issystemplaylist'], - 'podcast_ids': playlist_record['podcastids'], - 'include_unplayed': playlist_record['includeunplayed'], - 'include_partially_played': playlist_record['includepartiallyplayed'], - 'include_played': playlist_record['includeplayed'], - 'min_duration': playlist_record['minduration'], - 'max_duration': playlist_record['maxduration'], - 'sort_order': playlist_record['sortorder'], - 'group_by_podcast': playlist_record['groupbypodcast'], - 'max_episodes': playlist_record['maxepisodes'], - 'last_updated': playlist_record['lastupdated'], - 'created': playlist_record['created'], - 'icon_name': playlist_record['iconname'], - 'episode_count': playlist_record['episode_count'] - } - - # Convert null values to appropriate string representations or default values - if result['last_updated'] is None: - result['last_updated'] = "" - - if result['created'] is None: - result['created'] = "" - - if result['icon_name'] is None: - result['icon_name'] = "" # Or a default icon name like "ph-playlist" - - # Handle episode_count - ensure it's an integer - if isinstance(result['episode_count'], str): # It's coming back as a 
timestamp string - result['episode_count'] = 0 - - return result - -def normalize_preview_episode(episode_record): - """Normalize episode preview data regardless of whether it's a tuple or dict.""" - if isinstance(episode_record, tuple): - return { - 'title': episode_record[0], - 'artwork': episode_record[1] - } - return { - 'title': episode_record.get('episodetitle', episode_record.get('EpisodeTitle')), - 'artwork': episode_record.get('episodeartwork', episode_record.get('EpisodeArtwork')) - } - -def get_playlists(cnx, database_type, user_id): - """ - Get all playlists (system playlists and user's custom playlists) - Returns consistently formatted dict results regardless of database response format - """ - try: - if database_type == "postgresql": - # Create a cursor that returns dictionaries for PostgreSQL - cursor = cnx.cursor(row_factory=psycopg.rows.dict_row) - - # PostgreSQL query - cursor.execute(""" - WITH filtered_episodes AS ( - SELECT pc.PlaylistID, pc.EpisodeID - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID - JOIN "Podcasts" p ON e.PodcastID = p.PodcastID - WHERE p.UserID = %s - ) - SELECT - p.*, - COUNT(fe.EpisodeID)::INTEGER as episode_count, - p.IconName as icon_name - FROM "Playlists" p - LEFT JOIN filtered_episodes fe ON p.PlaylistID = fe.PlaylistID - WHERE p.IsSystemPlaylist = TRUE - OR p.UserID = %s - GROUP BY p.PlaylistID - ORDER BY p.IsSystemPlaylist DESC, p.Name ASC - """, (user_id, user_id)) - - playlists = cursor.fetchall() - - else: # MySQL - # Create a cursor for MySQL - cursor = cnx.cursor(dictionary=True) - - # MySQL query - cursor.execute(""" - WITH filtered_episodes AS ( - SELECT pc.PlaylistID, pc.EpisodeID - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE p.UserID = %s - ) - SELECT - p.*, - COUNT(fe.EpisodeID) as episode_count, - p.IconName as icon_name - FROM Playlists p - LEFT JOIN filtered_episodes fe ON p.PlaylistID = 
fe.PlaylistID - WHERE p.IsSystemPlaylist = TRUE - OR p.UserID = %s - GROUP BY p.PlaylistID - ORDER BY p.IsSystemPlaylist DESC, p.Name ASC - """, (user_id, user_id)) - - playlists = cursor.fetchall() - - playlist_list = [] - for playlist_record in playlists: - # Get the podcast_ids field - raw_podcast_ids = playlist_record.get('podcastids', playlist_record.get('PodcastIDs')) - - # Process podcast_ids based on the data type and database - processed_podcast_ids = None - if raw_podcast_ids is not None: - if database_type == "postgresql": - # PostgreSQL returns a list directly - processed_podcast_ids = raw_podcast_ids - else: - # MySQL: Handle different formats - import json - - # If it's a single integer, wrap it in a list - if isinstance(raw_podcast_ids, int): - processed_podcast_ids = [raw_podcast_ids] - # If it's a single string that can be parsed as an integer - elif isinstance(raw_podcast_ids, str) and raw_podcast_ids.strip().isdigit(): - processed_podcast_ids = [int(raw_podcast_ids.strip())] - # If it's a string, try to parse it - elif isinstance(raw_podcast_ids, str): - try: - # Try to parse as JSON string - processed_podcast_ids = json.loads(raw_podcast_ids) - except json.JSONDecodeError: - # If that fails, try to handle quoted strings - try: - # Strip quotes if present - cleaned = raw_podcast_ids.strip('"\'') - # Manual parsing for array-like strings - if cleaned.startswith('[') and cleaned.endswith(']'): - items = cleaned[1:-1].split(',') - processed_podcast_ids = [int(item.strip()) for item in items if item.strip()] - else: - # For comma-separated list without brackets - processed_podcast_ids = [int(item.strip()) for item in cleaned.split(',') if item.strip()] - except (ValueError, AttributeError): - # Last resort: empty list - processed_podcast_ids = [] - else: - # If it's none of the above, keep as is - processed_podcast_ids = raw_podcast_ids - - # Make sure we always return a list - if processed_podcast_ids is not None and not 
isinstance(processed_podcast_ids, list): - processed_podcast_ids = [processed_podcast_ids] - - # Normalize field names to handle both PostgreSQL's lowercase and MySQL's capitalized names - playlist_dict = { - 'playlist_id': playlist_record.get('playlistid', playlist_record.get('PlaylistID')), - 'user_id': playlist_record.get('userid', playlist_record.get('UserID')), - 'name': playlist_record.get('name', playlist_record.get('Name')), - 'description': playlist_record.get('description', playlist_record.get('Description')), - 'is_system_playlist': bool(playlist_record.get('issystemplaylist', playlist_record.get('IsSystemPlaylist'))), - 'podcast_ids': processed_podcast_ids, # Use our processed value - 'include_unplayed': bool(playlist_record.get('includeunplayed', playlist_record.get('IncludeUnplayed'))), - 'include_partially_played': bool(playlist_record.get('includepartiallyplayed', playlist_record.get('IncludePartiallyPlayed'))), - 'include_played': bool(playlist_record.get('includeplayed', playlist_record.get('IncludePlayed'))), - 'min_duration': playlist_record.get('minduration', playlist_record.get('MinDuration')), - 'max_duration': playlist_record.get('maxduration', playlist_record.get('MaxDuration')), - 'sort_order': playlist_record.get('sortorder', playlist_record.get('SortOrder')), - 'group_by_podcast': bool(playlist_record.get('groupbypodcast', playlist_record.get('GroupByPodcast'))), - 'max_episodes': playlist_record.get('maxepisodes', playlist_record.get('MaxEpisodes')), - 'last_updated': playlist_record.get('lastupdated', playlist_record.get('LastUpdated', "")), - 'created': playlist_record.get('created', playlist_record.get('Created', "")), - 'icon_name': playlist_record.get('iconname', playlist_record.get('IconName', "")), - 'episode_count': int(playlist_record.get('episode_count', 0) or 0) - } - - # Get preview episodes with error handling - try: - if database_type == "postgresql": - # Use dict cursor for PostgreSQL - preview_cursor = 
cnx.cursor(row_factory=psycopg.rows.dict_row) - preview_cursor.execute(""" - SELECT e.EpisodeTitle as episodetitle, e.EpisodeArtwork as episodeartwork - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID - JOIN "Podcasts" p ON e.PodcastID = p.PodcastID - WHERE pc.PlaylistID = %s - AND p.UserID = %s - ORDER BY pc.Position - LIMIT 3 - """, (playlist_dict['playlist_id'], user_id)) - else: # MySQL - # Use dict cursor for MySQL - preview_cursor = cnx.cursor(dictionary=True) - preview_cursor.execute(""" - SELECT e.EpisodeTitle as episodetitle, e.EpisodeArtwork as episodeartwork - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE pc.PlaylistID = %s - AND p.UserID = %s - ORDER BY pc.Position - LIMIT 3 - """, (playlist_dict['playlist_id'], user_id)) - - preview_episodes = preview_cursor.fetchall() - - # Normalize field names for preview episodes - playlist_dict['preview_episodes'] = [] - for ep in preview_episodes: - # Handle both PostgreSQL and MySQL column naming - title = ep.get('episodetitle', ep.get('EpisodeTitle', '')) - artwork = ep.get('episodeartwork', ep.get('EpisodeArtwork', '')) - playlist_dict['preview_episodes'].append({ - 'title': title, - 'artwork': artwork - }) - - preview_cursor.close() - except Exception as e: - print(f"Error fetching preview episodes for playlist {playlist_dict['playlist_id']}: {e}") - playlist_dict['preview_episodes'] = [] - - playlist_list.append(playlist_dict) - - return playlist_list - except Exception as e: - raise Exception(f"Failed to get playlists: {str(e)}") - finally: - if 'cursor' in locals(): - cursor.close() - -def normalize_episode(episode): - """Normalize episode data regardless of tuple or dict format""" - if isinstance(episode, tuple): - return { - 'episodeid': episode[0], - 'episodetitle': episode[1], - 'episodedescription': episode[2], - 'episodeartwork': episode[3], - 'episodepubdate': episode[4], - 'episodeurl': 
episode[5], - 'episodeduration': episode[6], - 'listenduration': episode[7], - 'completed': bool(episode[8]) if episode[8] is not None else False, - 'saved': bool(episode[9]) if episode[9] is not None else False, - 'queued': bool(episode[10]) if episode[10] is not None else False, - 'is_youtube': bool(episode[11]) if episode[11] is not None else False, - 'downloaded': bool(episode[12]) if episode[12] is not None else False, - 'podcastname': episode[13] - } - - # For dict case, map field names explicitly - field_mappings = { - 'episodeid': ['episodeid', 'EpisodeID'], - 'episodetitle': ['episodetitle', 'EpisodeTitle'], - 'episodedescription': ['episodedescription', 'EpisodeDescription'], - 'episodeartwork': ['episodeartwork', 'EpisodeArtwork'], - 'episodepubdate': ['episodepubdate', 'EpisodePubDate'], - 'episodeurl': ['episodeurl', 'EpisodeURL'], - 'episodeduration': ['episodeduration', 'EpisodeDuration'], - 'listenduration': ['listenduration', 'ListenDuration'], - 'completed': bool(episode['completed']) if episode['completed'] is not None else False, - 'saved': bool(episode['saved']) if episode['saved'] is not None else False, - 'queued': bool(episode['queued']) if episode['queued'] is not None else False, - 'is_youtube': bool(episode.get('isyoutube', False)), # Use get() with default False - 'downloaded': bool(episode['downloaded']) if episode['downloaded'] is not None else False, - 'podcastname': ['podcastname', 'PodcastName'] - } - - result = {} - for field, possible_keys in field_mappings.items(): - # Try all possible keys for each field - value = None - for key in possible_keys: - value = episode.get(key) - if value is not None: - break - - # Handle booleans - if field in ['completed', 'saved', 'queued', 'is_youtube', 'downloaded']: - value = value or False - - result[field] = value - - return result - -def normalize_playlist_info(playlist_info): - """Normalize playlist info data regardless of tuple or dict format""" - if isinstance(playlist_info, tuple): - 
return { - 'name': playlist_info[0], - 'description': playlist_info[1], - 'episode_count': playlist_info[2], - 'icon_name': playlist_info[3] - } - # For dict case, first try lowercase keys (most common) - name = playlist_info.get('name') - description = playlist_info.get('description') - episode_count = playlist_info.get('episode_count') - icon_name = playlist_info.get('iconname') # Note: this comes back as 'iconname' not 'icon_name' - - # If any are None, try uppercase keys as fallback - if name is None: - name = playlist_info.get('Name') - if description is None: - description = playlist_info.get('Description') - if episode_count is None: - episode_count = playlist_info.get('EpisodeCount') - if icon_name is None: - icon_name = playlist_info.get('IconName') - - return { - 'name': name, - 'description': description, - 'episode_count': episode_count, - 'icon_name': icon_name - } - -def get_playlist_episodes(cnx, database_type, user_id, playlist_id): - """ - Get all episodes in a playlist, applying the playlist's filters - Returns both playlist info and episodes in format matching Rust structs - """ - print(f"Starting playlist episodes fetch for playlist_id={playlist_id}") - cursor = cnx.cursor() - try: - # Get playlist info - # Get playlist info with user-specific episode count - if database_type == "postgresql": - cursor.execute(""" - SELECT - p.Name, - p.Description, - (SELECT COUNT(*) - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID - JOIN "Podcasts" pod ON e.PodcastID = pod.PodcastID - LEFT JOIN "UserEpisodeHistory" h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s - WHERE pc.PlaylistID = p.PlaylistID - AND (p.IsSystemPlaylist = FALSE OR - (p.IsSystemPlaylist = TRUE AND - (h.EpisodeID IS NOT NULL OR pod.UserID = %s))) - ) as episode_count, - p.IconName, - p.IsSystemPlaylist - FROM "Playlists" p - WHERE p.PlaylistID = %s AND (p.UserID = %s OR p.IsSystemPlaylist = TRUE) - GROUP BY p.PlaylistID, p.Name, p.Description, p.IconName, 
p.IsSystemPlaylist - """, (user_id, user_id, playlist_id, user_id)) - # Get playlist info with user-specific episode count - else: # MySQL - cursor.execute(""" - SELECT - p.Name, - p.Description, - (SELECT COUNT(*) - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts pod ON e.PodcastID = pod.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s - WHERE pc.PlaylistID = p.PlaylistID - AND (p.IsSystemPlaylist = 0 OR - (p.IsSystemPlaylist = 1 AND - (h.EpisodeID IS NOT NULL OR pod.UserID = %s))) - ) as episode_count, - p.IconName, - p.IsSystemPlaylist - FROM Playlists p - WHERE p.PlaylistID = %s AND (p.UserID = %s OR p.IsSystemPlaylist = 1) - """, (user_id, user_id, playlist_id, user_id)) - - playlist_info = cursor.fetchone() - - if not playlist_info: - raise Exception(f"Playlist {playlist_id} not found or access denied") - - # Handle both tuple and dict formats for playlist info - is_system_playlist = False - if isinstance(playlist_info, tuple): - normalized_info = { - 'name': playlist_info[0], - 'description': playlist_info[1], - 'episode_count': playlist_info[2], - 'icon_name': playlist_info[3] - } - is_system_playlist = playlist_info[4] - else: - # Handle both upper and lower case keys - normalized_info = { - 'name': playlist_info.get('name') or playlist_info.get('Name'), - 'description': playlist_info.get('description') or playlist_info.get('Description'), - 'episode_count': playlist_info.get('episode_count') or playlist_info.get('episode_count'), - 'icon_name': playlist_info.get('iconname') or playlist_info.get('IconName') - } - is_system_playlist = playlist_info.get('issystemplaylist') or playlist_info.get('IsSystemPlaylist') - - print(f"Debug - playlist_info type: {type(playlist_info)}") - print(f"Debug - playlist_info content: {playlist_info}") - print(f"Debug - normalized playlist info: {normalized_info}") - print(f"Debug - is_system_playlist: {is_system_playlist}") - - # Get playlist 
settings - if database_type == "postgresql": - cursor.execute(""" - SELECT - IncludeUnplayed, - IncludePartiallyPlayed, - IncludePlayed, - MinDuration, - MaxDuration, - SortOrder, - GroupByPodcast, - MaxEpisodes, - PodcastIDs - FROM "Playlists" - WHERE PlaylistID = %s AND (UserID = %s OR IsSystemPlaylist = TRUE) - """, (playlist_id, user_id)) - else: # MySQL - cursor.execute(""" - SELECT - IncludeUnplayed, - IncludePartiallyPlayed, - IncludePlayed, - MinDuration, - MaxDuration, - SortOrder, - GroupByPodcast, - MaxEpisodes, - PodcastIDs - FROM Playlists - WHERE PlaylistID = %s AND (UserID = %s OR IsSystemPlaylist = 1) - """, (playlist_id, user_id)) - - playlist_settings = cursor.fetchone() - if isinstance(playlist_settings, dict): - # Handle both uppercase and lowercase keys - settings = [ - playlist_settings.get('includeunplayed', playlist_settings.get('IncludeUnplayed')), - playlist_settings.get('includepartiallyplayed', playlist_settings.get('IncludePartiallyPlayed')), - playlist_settings.get('includeplayed', playlist_settings.get('IncludePlayed')), - playlist_settings.get('minduration', playlist_settings.get('MinDuration')), - playlist_settings.get('maxduration', playlist_settings.get('MaxDuration')), - playlist_settings.get('sortorder', playlist_settings.get('SortOrder')), - playlist_settings.get('groupbypodcast', playlist_settings.get('GroupByPodcast')), - playlist_settings.get('maxepisodes', playlist_settings.get('MaxEpisodes')), - playlist_settings.get('podcastids', playlist_settings.get('PodcastIDs')) - ] - else: # tuple - settings = playlist_settings - print(f"Debug - playlist_settings type: {type(playlist_settings)}") - print(f"Debug - playlist_settings content: {playlist_settings}") - - (include_unplayed, include_partially_played, include_played, - min_duration, max_duration, sort_order, group_by_podcast, - max_episodes, podcast_ids) = settings - - # Build episode query with appropriate table names for each database - if database_type == "postgresql": - 
query = """ - SELECT DISTINCT - e.EpisodeID, - e.EpisodeTitle, - e.EpisodeDescription, - e.EpisodeArtwork, - e.EpisodePubDate, - e.EpisodeURL, - e.EpisodeDuration, - el.ListenDuration as ListenDuration, - CASE - WHEN el.ListenDuration >= e.EpisodeDuration THEN TRUE - ELSE FALSE - END as Completed, - es.SaveID IS NOT NULL as Saved, - eq.QueueID IS NOT NULL as Queued, - eq.is_youtube as IsYouTube, - ed.DownloadID IS NOT NULL as Downloaded, - p.PodcastName - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID - JOIN "Podcasts" p ON e.PodcastID = p.PodcastID - LEFT JOIN "UserEpisodeHistory" el ON e.EpisodeID = el.EpisodeID AND el.UserID = %s - LEFT JOIN "SavedEpisodes" es ON e.EpisodeID = es.EpisodeID AND es.UserID = %s - LEFT JOIN "EpisodeQueue" eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = %s - LEFT JOIN "DownloadedEpisodes" ed ON e.EpisodeID = ed.EpisodeID AND ed.UserID = %s - WHERE pc.PlaylistID = %s - AND (p.UserID = %s OR NOT %s) - """ - else: # MySQL - query = """ - SELECT DISTINCT - e.EpisodeID, - e.EpisodeTitle, - e.EpisodeDescription, - e.EpisodeArtwork, - e.EpisodePubDate, - e.EpisodeURL, - e.EpisodeDuration, - el.ListenDuration as ListenDuration, - CASE - WHEN el.ListenDuration >= e.EpisodeDuration THEN 1 - ELSE 0 - END as Completed, - es.SaveID IS NOT NULL as Saved, - eq.QueueID IS NOT NULL as Queued, - eq.is_youtube as IsYouTube, - ed.DownloadID IS NOT NULL as Downloaded, - p.PodcastName - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts p ON e.PodcastID = p.PodcastID - LEFT JOIN UserEpisodeHistory el ON e.EpisodeID = el.EpisodeID AND el.UserID = %s - LEFT JOIN SavedEpisodes es ON e.EpisodeID = es.EpisodeID AND es.UserID = %s - LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = %s - LEFT JOIN DownloadedEpisodes ed ON e.EpisodeID = ed.EpisodeID AND ed.UserID = %s - WHERE pc.PlaylistID = %s - AND (p.UserID = %s OR NOT %s) - """ - params = [user_id, user_id, user_id, 
user_id, playlist_id, user_id, is_system_playlist] - - # Add sorting logic - if sort_order == "date_desc": - query += " ORDER BY e.EpisodePubDate DESC" - elif sort_order == "date_asc": - query += " ORDER BY e.EpisodePubDate ASC" - elif sort_order == "duration_desc": - query += " ORDER BY e.EpisodeDuration DESC" - elif sort_order == "duration_asc": - query += " ORDER BY e.EpisodeDuration ASC" - - # Add limit if specified - if max_episodes: - query += " LIMIT %s" - params.append(max_episodes) - - print(f"Debug - final query: {query}") - print(f"Debug - final params: {params}") - - cursor.execute(query, tuple(params)) - episodes = cursor.fetchall() - print(f"Debug - episodes type: {type(episodes)}") - print(f"Debug - first episode content: {episodes[0] if episodes else None}") - print(f"Debug - number of episodes: {len(episodes)}") - - # Normalize all episodes - episode_list = [] - for episode in episodes: - if isinstance(episode, tuple): - episode_dict = { - 'episodeid': episode[0], - 'episodetitle': episode[1], - 'episodedescription': episode[2], - 'episodeartwork': episode[3], - 'episodepubdate': episode[4], - 'episodeurl': episode[5], - 'episodeduration': episode[6], - 'listenduration': episode[7], - 'completed': bool(episode[8]) if episode[8] is not None else False, - 'saved': bool(episode[9]) if episode[9] is not None else False, - 'queued': bool(episode[10]) if episode[10] is not None else False, - 'is_youtube': bool(episode[11]) if episode[11] is not None else False, - 'downloaded': bool(episode[12]) if episode[12] is not None else False, - 'podcastname': episode[13] - } - else: - # Handle both upper and lower case dictionary keys - episode_dict = { - 'episodeid': episode.get('episodeid', episode.get('EpisodeID')), - 'episodetitle': episode.get('episodetitle', episode.get('EpisodeTitle')), - 'episodedescription': episode.get('episodedescription', episode.get('EpisodeDescription')), - 'episodeartwork': episode.get('episodeartwork', 
episode.get('EpisodeArtwork')), - 'episodepubdate': episode.get('episodepubdate', episode.get('EpisodePubDate')), - 'episodeurl': episode.get('episodeurl', episode.get('EpisodeURL')), - 'episodeduration': episode.get('episodeduration', episode.get('EpisodeDuration')), - 'listenduration': episode.get('listenduration', episode.get('ListenDuration')), - 'completed': bool(episode.get('completed', episode.get('Completed'))) if episode.get('completed', episode.get('Completed')) is not None else False, - 'saved': bool(episode.get('saved', episode.get('Saved'))) if episode.get('saved', episode.get('Saved')) is not None else False, - 'queued': bool(episode.get('queued', episode.get('Queued'))) if episode.get('queued', episode.get('Queued')) is not None else False, - 'is_youtube': bool(episode.get('isyoutube', episode.get('IsYouTube'))) if episode.get('isyoutube', episode.get('IsYouTube')) is not None else False, - 'downloaded': bool(episode.get('downloaded', episode.get('Downloaded'))) if episode.get('downloaded', episode.get('Downloaded')) is not None else False, - 'podcastname': episode.get('podcastname', episode.get('PodcastName')) - } - episode_list.append(episode_dict) - - # Return directly matching Rust struct - no extra nesting - return { - "playlist_info": normalized_info, - "episodes": episode_list - } - - except Exception as e: - raise Exception(f"Failed to get playlist episodes: {str(e)}") - finally: - cursor.close() - -def get_episode_id_by_url_key(database_type, cnx, url_key): - cursor = cnx.cursor() - - query = ''' - SELECT EpisodeID FROM "SharedEpisodes" WHERE UrlKey = %s AND ExpirationDate > NOW() - ''' if database_type == "postgresql" else ''' - SELECT EpisodeID FROM SharedEpisodes WHERE UrlKey = %s AND ExpirationDate > NOW() - ''' - - try: - cursor.execute(query, (url_key,)) - result = cursor.fetchone() - - # Debug: print the result type and value - print(f"Result: {result}, Type: {type(result)}") - - if result: - # Safely handle result as either tuple or 
dict - if isinstance(result, tuple): - print('tuple') - episode_id = result[0] # Access tuple - elif isinstance(result, dict): - print('dict') - if database_type == 'postgresql': - episode_id = result['episodeid'] # Access dict - else: - episode_id = result['EpisodeID'] # Access dict - else: - episode_id = None # If somehow it's neither, default to None - else: - episode_id = None - print(episode_id) - cursor.close() - return episode_id - except Exception as e: - print(f"Error retrieving episode by URL key: {e}") - cursor.close() - return None - - - -def add_gpodder_settings(database_type, cnx, user_id, gpodder_url, gpodder_token, login_name, pod_sync_type): - print("Adding gPodder settings") - the_key = get_encryption_key(cnx, database_type) - - cursor = cnx.cursor() - from cryptography.fernet import Fernet - - encryption_key_bytes = base64.b64decode(the_key) - - cipher_suite = Fernet(encryption_key_bytes) - - # Only encrypt password if it's not None - if gpodder_token is not None: - encrypted_password = cipher_suite.encrypt(gpodder_token.encode()) - # Decode encrypted password back to string - decoded_token = encrypted_password.decode() - else: - decoded_token = None - - query = ( - 'UPDATE "Users" SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s' if database_type == "postgresql" else - "UPDATE Users SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s" - ) - - cursor.execute(query, (gpodder_url, login_name, decoded_token, pod_sync_type, user_id)) - - # Check if the update was successful - if cursor.rowcount == 0: - return None - - cnx.commit() # Commit changes to the database - cursor.close() - - return True - -def add_gpodder_server(database_type, cnx, user_id, gpodder_url, gpodder_username, gpodder_password): - print("Adding gPodder settings") - the_key = get_encryption_key(cnx, database_type) - - cursor = cnx.cursor() - from cryptography.fernet import Fernet - - 
encryption_key_bytes = base64.b64decode(the_key) - - cipher_suite = Fernet(encryption_key_bytes) - - # Only encrypt password if it's not None - if gpodder_password is not None: - encrypted_password = cipher_suite.encrypt(gpodder_password.encode()) - # Decode encrypted password back to string - decoded_token = encrypted_password.decode() - else: - decoded_token = None - - query = ( - 'UPDATE "Users" SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s' if database_type == "postgresql" else - "UPDATE Users SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s" - ) - pod_sync_type = "gpodder" - cursor.execute(query, (gpodder_url, gpodder_username, decoded_token, pod_sync_type, user_id)) - - # Check if the update was successful - if cursor.rowcount == 0: - return None - - cnx.commit() # Commit changes to the database - cursor.close() - - return True - - - -def get_gpodder_settings(database_type, cnx, user_id): - """Get the GPodder settings for a user with improved error handling""" - import logging - - logger = logging.getLogger(__name__) - - # Check if cnx is a valid connection object - if not hasattr(cnx, 'cursor'): - logger.error(f"Invalid database connection object: {type(cnx)}") - return {} - - cursor = cnx.cursor() - try: - query = ( - 'SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM "Users" WHERE UserID = %s' if database_type == "postgresql" else - "SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = %s" - ) - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - # Ensure result is consistent - if result: - if isinstance(result, tuple): - # Convert tuple result to a dictionary - result = { - "gpodderurl": result[0], - "gpoddertoken": result[1], - "gpodderloginname": result[2] - } - elif isinstance(result, dict): - # Normalize keys to lower case if necessary - result = {k.lower(): v for k, v in result.items()} - else: - result = 
{} - - # Apply lowercase keys if needed - if 'lowercase_keys' in globals(): - return lowercase_keys(result) - return result - except Exception as e: - logger.error(f"Error in get_gpodder_settings: {str(e)}") - return {} - finally: - cursor.close() - - - - -def get_nextcloud_settings(database_type, cnx, user_id): - cursor = cnx.cursor() - try: - query = ( - 'SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM "Users" WHERE UserID = %s' if database_type == "postgresql" else - "SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = %s" - ) - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - if result: - if isinstance(result, dict): - # Handle PostgreSQL dictionary result - url = result.get('gpodderurl') - token = result.get('gpoddertoken') - login = result.get('gpodderloginname') - else: - # Handle tuple result - url, token, login = result[0], result[1], result[2] - - if url and token and login: - return url, token, login - - return None, None, None - finally: - cursor.close() - -def get_gpodder_type(cnx, database_type, user_id): - cursor = cnx.cursor() - query = ( - 'SELECT Pod_Sync_Type FROM "Users" WHERE UserID = %s' if database_type == "postgresql" else - "SELECT Pod_Sync_Type FROM Users WHERE UserID = %s" - ) - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - - if result: - if isinstance(result, dict): - return result.get('pod_sync_type' if database_type == 'postgresql' else 'Pod_Sync_Type') - elif isinstance(result, (list, tuple)): - return result[0] - return None - - - - -def remove_gpodder_settings(database_type, cnx, user_id): - """Remove GPodder sync settings for a user""" - import logging - logger = logging.getLogger(__name__) - - cursor = cnx.cursor() - try: - # First delete any device records - if database_type == "postgresql": - devices_query = 'DELETE FROM "GpodderDevices" WHERE UserID = %s' - sync_state_query = 'DELETE FROM "GpodderSyncState" WHERE UserID = %s' - else: - 
devices_query = "DELETE FROM GpodderDevices WHERE UserID = %s" - sync_state_query = "DELETE FROM GpodderSyncState WHERE UserID = %s" - - cursor.execute(devices_query, (user_id,)) - cursor.execute(sync_state_query, (user_id,)) - - # Then clear GPodder settings from user record - if database_type == "postgresql": - user_query = ''' - UPDATE "Users" - SET GpodderUrl = '', GpodderLoginName = '', GpodderToken = '', Pod_Sync_Type = 'None' - WHERE UserID = %s - ''' - else: - user_query = ''' - UPDATE Users - SET GpodderUrl = '', GpodderLoginName = '', GpodderToken = '', Pod_Sync_Type = 'None' - WHERE UserID = %s - ''' - - cursor.execute(user_query, (user_id,)) - cnx.commit() - return True - except Exception as e: - logger.error(f"Error removing GPodder settings: {e}") - cnx.rollback() - return False - finally: - cursor.close() - - - -def check_gpodder_settings(database_type, cnx, user_id): - cursor = cnx.cursor() - query = ( - 'SELECT GpodderUrl, GpodderToken FROM "Users" WHERE UserID = %s' if database_type == "postgresql" else - "SELECT GpodderUrl, GpodderToken FROM Users WHERE UserID = %s" - ) - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - - if result: - if isinstance(result, dict): - gpodder_url = result.get('gpodderurl' if database_type == 'postgresql' else 'GpodderUrl') - gpodder_token = result.get('gpoddertoken' if database_type == 'postgresql' else 'GpodderToken') - elif isinstance(result, (list, tuple)): - gpodder_url = result[0] - gpodder_token = result[1] - - if gpodder_url and gpodder_token: - return True - - return False - - -def get_nextcloud_users(database_type, cnx): - cursor = cnx.cursor() - # Query to select users with either external sync configuration OR internal gpodder API enabled - if database_type == "postgresql": - query = """ - SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName, Pod_Sync_Type - FROM "Users" - WHERE (GpodderUrl <> '' AND GpodderToken <> '' AND GpodderLoginName <> '') - OR Pod_Sync_Type IN 
('gpodder', 'both') - """ - else: # MySQL or MariaDB - query = """ - SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName, Pod_Sync_Type - FROM Users - WHERE (GpodderUrl <> '' AND GpodderToken <> '' AND GpodderLoginName <> '') - OR Pod_Sync_Type IN ('gpodder', 'both') - """ - cursor.execute(query) - # Fetch all matching records - users = cursor.fetchall() - cursor.close() - return users - - -import datetime - -def current_timestamp(): - # Return the current time in 'YYYY-MM-DDTHH:MM:SS' format, without fractional seconds or 'Z' - return datetime.datetime.now(datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%S') - -def add_podcast_to_nextcloud(cnx, database_type, gpodder_url, gpodder_login, encrypted_gpodder_token, podcast_url): - from cryptography.fernet import Fernet - from requests.auth import HTTPBasicAuth - - encryption_key = get_encryption_key(cnx, database_type) - encryption_key_bytes = base64.b64decode(encryption_key) - - cipher_suite = Fernet(encryption_key_bytes) - - # Decrypt the token - if encrypted_gpodder_token is not None: - decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode()) - gpodder_token = decrypted_token_bytes.decode() - else: - gpodder_token = None - - url = f"{gpodder_url}/index.php/apps/gpoddersync/subscription_change/create" - auth = HTTPBasicAuth(gpodder_login, gpodder_token) # Using Basic Auth - data = { - "add": [podcast_url], - "remove": [] - } - headers = { - "Content-Type": "application/json" - } - response = requests.post(url, json=data, headers=headers, auth=auth) - try: - response.raise_for_status() - print(f"Podcast added to Nextcloud successfully: {response.text}") - except requests.exceptions.HTTPError as e: - print(f"Failed to add podcast to Nextcloud: {e}") - print(f"Response body: {response.text}") - -def add_podcast_to_opodsync(cnx, database_type, user_id, gpodder_url, gpodder_login, gpodder_token, podcast_url, device_id="default"): - import requests - from requests.auth import HTTPBasicAuth - # 
Initialize response variable to None - response = None - try: - # Get user ID from gpodder_login - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = 'SELECT UserID, GpodderUrl FROM "Users" WHERE Username = %s' - else: - query = 'SELECT UserID, GpodderUrl FROM Users WHERE Username = %s' - - cursor.execute(query, (gpodder_login,)) - user_result = cursor.fetchone() - - user_id = None - user_gpodder_url = None - - if user_result: - if isinstance(user_result, dict): - user_id = user_result.get('userid') - user_gpodder_url = user_result.get('gpodderurl') - elif isinstance(user_result, tuple): - user_id = user_result[0] - user_gpodder_url = user_result[1] - finally: - cursor.close() - - # Detect if this is the internal API - is_internal_api = (gpodder_url == "http://localhost:8042") - # Create auth object - this is used for both session and direct auth - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - # Create headers - add special header only for internal API - headers = {"Content-Type": "application/json"} - if is_internal_api: - headers["X-GPodder-Token"] = gpodder_token - print("Using internal API with X-GPodder-Token header") - # Prepare request data - data = { - "add": [podcast_url], - "remove": [] - } - # Try session-based auth first (works with many external servers) - try: - session = requests.Session() - login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json" - print(f"Attempting session login at: {login_url}") - login_response = session.post(login_url, auth=auth, headers=headers if is_internal_api else None) - login_response.raise_for_status() - print("Session login successful for podcast add") - # Use the session to add the podcast - url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json" - print(f"Sending POST request to: {url}") - response = session.post(url, json=data, headers=headers) - response.raise_for_status() - print(f"Podcast added to oPodSync successfully using session: {response.text}") - 
- # If this is internal GPodder sync and we have a user ID, update UserStats - if is_internal_api and user_id is not None: - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - print(f"Incremented PodcastsAdded count for user {user_id} in UserStats table") - except Exception as stats_err: - print(f"Error updating UserStats: {stats_err}") - finally: - cursor.close() - - return response.json() - except Exception as e: - print(f"Session auth failed, trying basic auth: {str(e)}") - # Fall back to direct basic auth - url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json" - print(f"Sending direct POST request to: {url}") - print(f"Using headers: {headers}") - print(f"Using auth with username: {gpodder_login}") - response = requests.post(url, json=data, headers=headers, auth=auth) - print(f"Response status: {response.status_code}") - response.raise_for_status() - print(f"Podcast added to oPodSync successfully with basic auth: {response.text}") - - # If this is internal GPodder sync and we have a user ID, update UserStats - if is_internal_api and user_id is not None: - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s' - else: # MySQL or MariaDB - query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - cnx.commit() - print(f"Incremented PodcastsAdded count for user {user_id} in UserStats table") - except Exception as stats_err: - print(f"Error updating UserStats: {stats_err}") - finally: - cursor.close() - - return response.json() - except Exception as e: - print(f"Failed to add podcast to oPodSync: {e}") - if response is not 
None: - print(f"Response body: {getattr(response, 'text', 'No response object')}") - print(f"Status code: {getattr(response, 'status_code', 'No status code')}") - # If there was a server error, try to get more information - if getattr(response, 'status_code', 0) >= 500: - print("Server returned an error. Check gpodder API logs for more details.") - else: - print("No response received (error occurred before HTTP request)") - return None - -def remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, encrypted_gpodder_token, podcast_url): - from cryptography.fernet import Fernet - from requests.auth import HTTPBasicAuth - - encryption_key = get_encryption_key(cnx, database_type) - encryption_key_bytes = base64.b64decode(encryption_key) - - cipher_suite = Fernet(encryption_key_bytes) - - # Decrypt the token - if encrypted_gpodder_token is not None: - decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode()) - gpodder_token = decrypted_token_bytes.decode() - else: - gpodder_token = None - - url = f"{gpodder_url}/index.php/apps/gpoddersync/subscription_change/create" - auth = HTTPBasicAuth(gpodder_login, gpodder_token) # Using Basic Auth - headers = { - "Content-Type": "application/json" - } - data = { - "add": [], - "remove": [podcast_url] - } - response = requests.post(url, json=data, headers=headers, auth=auth) - try: - response.raise_for_status() - print(f"Podcast removed from Nextcloud successfully: {response.text}") - except requests.exceptions.HTTPError as e: - print(f"Failed to remove podcast from Nextcloud: {e}") - print(f"Response body: {response.text}") - - -def remove_podcast_from_opodsync(cnx, database_type, user_id, gpodder_url, gpodder_login, gpodder_token, podcast_url, device_id="default"): - from requests.auth import HTTPBasicAuth - import requests - import traceback - import mysql.connector - import psycopg - - # Track if we've handled episode removal internally - episodes_handled = False - response = None - - 
try: - # Validate required parameters first - if not gpodder_url or not gpodder_login or not podcast_url: - error_msg = "Missing required parameters for oPodSync removal" - print(f"Failed to remove podcast from oPodSync: {error_msg}") - return False, episodes_handled - - # Check if token is provided - if gpodder_token is None: - print("No gpodder token provided") - return False, episodes_handled - - # Detect if this is the internal API - is_internal_api = (gpodder_url == "http://localhost:8042") - - # For internal API, handle episode deletion directly to avoid foreign key constraints - if is_internal_api: - print("Using internal gPodder API - handling episodes directly") - - # First, get the podcast_id for this feed URL - cursor = cnx.cursor() - try: - if database_type == "postgresql": - # PostgreSQL: Quoted table names, unquoted lowercase column names - podcast_query = 'SELECT podcastid FROM "Podcasts" WHERE feedurl = %s AND userid = %s' - else: # MySQL or MariaDB - # MySQL/MariaDB: Unquoted table and column names with proper case - podcast_query = 'SELECT PodcastID FROM Podcasts WHERE FeedURL = %s AND UserID = %s' - - cursor.execute(podcast_query, (podcast_url, user_id)) - result = cursor.fetchone() - - podcast_id = None - if result: - # Extract podcast_id based on the result type - if isinstance(result, dict): - podcast_id = result.get('podcastid') or result.get('PodcastID') - else: # tuple - podcast_id = result[0] - - if podcast_id: - print(f"Found podcast ID {podcast_id} for URL {podcast_url}") - - # Now delete all related data to handle the foreign key constraints - if database_type == "postgresql": - # PostgreSQL: Quoted table names, unquoted lowercase column names - delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)' - delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)' - delete_downloaded = 'DELETE 
FROM "DownloadedEpisodes" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)' - delete_saved = 'DELETE FROM "SavedEpisodes" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)' - delete_queue = 'DELETE FROM "EpisodeQueue" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)' - delete_episodes = 'DELETE FROM "Episodes" WHERE podcastid = %s' - delete_podcast = 'DELETE FROM "Podcasts" WHERE podcastid = %s' - update_user_stats = 'UPDATE "UserStats" SET podcastsadded = podcastsadded - 1 WHERE userid = %s' - else: # MySQL or MariaDB - # MySQL/MariaDB: Unquoted table and column names with proper case - delete_playlist_contents = 'DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)' - delete_history = 'DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)' - delete_downloaded = 'DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)' - delete_saved = 'DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)' - delete_queue = 'DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)' - delete_episodes = 'DELETE FROM Episodes WHERE PodcastID = %s' - delete_podcast = 'DELETE FROM Podcasts WHERE PodcastID = %s' - update_user_stats = 'UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s' - - # Execute the deletion statements in order - try: - cursor.execute(delete_playlist_contents, (podcast_id,)) - print(f"Deleted playlist contents for podcast ID {podcast_id}") - - cursor.execute(delete_history, (podcast_id,)) - print(f"Deleted episode history for podcast ID {podcast_id}") - - cursor.execute(delete_downloaded, (podcast_id,)) - print(f"Deleted downloaded episodes for podcast ID {podcast_id}") - - cursor.execute(delete_saved, (podcast_id,)) - print(f"Deleted saved 
episodes for podcast ID {podcast_id}") - - cursor.execute(delete_queue, (podcast_id,)) - print(f"Deleted queued episodes for podcast ID {podcast_id}") - - cursor.execute(delete_episodes, (podcast_id,)) - print(f"Deleted episodes for podcast ID {podcast_id}") - - cursor.execute(delete_podcast, (podcast_id,)) - print(f"Deleted podcast with ID {podcast_id}") - - cursor.execute(update_user_stats, (user_id,)) - print(f"Updated user stats for user ID {user_id}") - - cnx.commit() - print("All database operations committed successfully") - episodes_handled = True - except (psycopg.Error, mysql.connector.Error) as db_err: - print(f"Database error during podcast deletion: {db_err}") - cnx.rollback() - # Continue with API call even if direct deletion failed - else: - print(f"Podcast ID not found for URL {podcast_url}") - except Exception as podcast_error: - print(f"Error finding podcast ID: {podcast_error}") - finally: - cursor.close() - - # Create auth object - this is used for both session and direct auth - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - - # Create headers - add special header only for internal API - headers = {"Content-Type": "application/json"} - if is_internal_api: - headers["X-GPodder-Token"] = gpodder_token - print("Using internal API with X-GPodder-Token header") - - # Create a session for cookie-based auth - session = requests.Session() - - # Try to establish a session first (for PodFetch) - try: - login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json" - print(f"Attempting session login at: {login_url}") - login_response = session.post(login_url, auth=auth, headers=headers if is_internal_api else None, timeout=10) - login_response.raise_for_status() - print("Session login successful for podcast removal") - - # Use the session to remove the podcast - url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json" - data = { - "add": [], - "remove": [podcast_url] - } - print(f"Sending POST request to: {url}") - response = 
session.post(url, json=data, headers=headers, timeout=10) - response.raise_for_status() - print(f"Podcast removed from oPodSync successfully using session: {response.text}") - return True, episodes_handled - - except requests.exceptions.RequestException as session_error: - print(f"Session auth failed, trying basic auth: {str(session_error)}") - - # Fall back to basic auth - url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json" - print(f"Sending direct POST request to: {url}") - print(f"Using headers: {headers}") - print(f"Using auth with username: {gpodder_login}") - data = { - "add": [], - "remove": [podcast_url] - } - - try: - response = requests.post(url, json=data, headers=headers, auth=auth, timeout=10) - print(f"Response status: {response.status_code}") - response.raise_for_status() - print(f"Podcast removed from oPodSync successfully with basic auth: {response.text}") - return True, episodes_handled - except requests.exceptions.RequestException as basic_auth_error: - print(f"Basic auth removal failed: {str(basic_auth_error)}") - return False, episodes_handled - - except Exception as e: - error_details = traceback.format_exc() - print(f"Failed to remove podcast from oPodSync: {str(e)}\n{error_details}") - if response is not None: - print(f"Response body: {getattr(response, 'text', 'No response object')}") - print(f"Status code: {getattr(response, 'status_code', 'No status code')}") - # If there was a server error, try to get more information - if getattr(response, 'status_code', 0) >= 500: - print("Server returned an error. 
Check gpodder API logs for more details.") - else: - print("No response received (error occurred before HTTP request)") - return False, episodes_handled - - - -def refresh_nextcloud_subscription(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token, gpodder_login, pod_sync_type): - # Set up logging - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - - try: - # Fetch and decrypt token - encryption_key = get_encryption_key(cnx, database_type) - encryption_key_bytes = base64.b64decode(encryption_key) - cipher_suite = Fernet(encryption_key_bytes) - - if encrypted_gpodder_token is not None: - decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode()) - gpodder_token = decrypted_token_bytes.decode() - else: - gpodder_token = None - - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - logger.info("Starting Nextcloud subscription refresh") - - # Get Nextcloud subscriptions - response = requests.get( - f"{gpodder_url}/index.php/apps/gpoddersync/subscriptions", - auth=auth - ) - response.raise_for_status() - - nextcloud_podcasts = response.json().get("add", []) - logger.info(f"Fetched Nextcloud podcasts: {nextcloud_podcasts}") - - # Get local podcasts - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s' - else: - query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - local_podcasts = [row[0] for row in cursor.fetchall()] - - podcasts_to_add = set(nextcloud_podcasts) - set(local_podcasts) - podcasts_to_remove = set(local_podcasts) - set(nextcloud_podcasts) - - # Track successful operations - successful_additions = set() - successful_removals = set() - - # Add new podcasts with individual error handling - logger.info("Adding new podcasts...") - for feed_url in podcasts_to_add: - try: - podcast_values = get_podcast_values(feed_url, user_id) - feed_cutoff = 30 - return_value = add_podcast(cnx, 
database_type, podcast_values, user_id, feed_cutoff) - if return_value: - logger.info(f"Successfully added {feed_url}") - successful_additions.add(feed_url) - else: - logger.error(f"Failed to add {feed_url}") - except Exception as e: - logger.error(f"Error processing {feed_url}: {str(e)}") - continue # Continue with next podcast even if this one fails - - # Remove podcasts with individual error handling - logger.info("Removing podcasts...") - for feed_url in podcasts_to_remove: - try: - if database_type == "postgresql": - query = 'SELECT PodcastName FROM "Podcasts" WHERE FeedURL = %s' - else: - query = "SELECT PodcastName FROM Podcasts WHERE FeedURL = %s" - - cursor.execute(query, (feed_url,)) - result = cursor.fetchone() - - if result: - podcast_name = result[0] - if remove_podcast(cnx, database_type, podcast_name, feed_url, user_id): - successful_removals.add(feed_url) - logger.info(f"Successfully removed {feed_url}") - else: - logger.error(f"Failed to remove {feed_url}") - else: - logger.warning(f"No podcast found with URL: {feed_url}") - except Exception as e: - logger.error(f"Error removing {feed_url}: {str(e)}") - continue - - cnx.commit() - cursor.close() - - # Sync changes with Nextcloud - if successful_additions or successful_removals: - try: - sync_subscription_change( - gpodder_url, - {"Authorization": f"Bearer {gpodder_token}"}, - list(successful_additions), - list(successful_removals) - ) - except Exception as e: - logger.error(f"Error syncing changes with Nextcloud: {str(e)}") - - # Process episode actions - try: - process_nextcloud_episode_actions(gpodder_url, gpodder_token, cnx, database_type, user_id) - except Exception as e: - logger.error(f"Error processing episode actions: {str(e)}") - - # Sync local episode times - try: - sync_nextcloud_episode_times(gpodder_url, gpodder_login, gpodder_token, cnx, database_type, user_id) - except Exception as e: - logger.error(f"Error syncing local episode times: {str(e)}") - - except Exception as e: - 
logger.error(f"Major error in refresh_nextcloud_subscription: {str(e)}") - raise - -def process_nextcloud_episode_actions(gpodder_url, gpodder_token, cnx, database_type, user_id): - logger = logging.getLogger(__name__) - - try: - # Use the correct Nextcloud endpoint - response = requests.get( - f"{gpodder_url}/index.php/apps/gpoddersync/episode_action", - headers={"Authorization": f"Bearer {gpodder_token}"} - ) - response.raise_for_status() - episode_actions = response.json() - - cursor = cnx.cursor() - - for action in episode_actions.get('actions', []): - try: - if action["action"].lower() in ["play", "update_time"]: - if "position" in action and action["position"] != -1: - episode_id = get_episode_id_by_url(cnx, database_type, action["episode"]) - if episode_id: - # Update listen duration - record_listen_duration(cnx, database_type, episode_id, user_id, int(action["position"])) - - # Check for completion, mirroring gPodder logic - if ("total" in action and action["total"] > 0 and - action["position"] >= action["total"]): - if database_type == "postgresql": - update_query = ''' - UPDATE "Episodes" - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - else: - update_query = ''' - UPDATE Episodes - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - cursor.execute(update_query, (episode_id,)) - cnx.commit() - logger.info(f"Marked episode {episode_id} as completed") - - logger.info(f"Recorded listen duration for episode {episode_id}") - else: - logger.warning(f"No episode ID found for URL {action['episode']}") - except Exception as e: - logger.error(f"Error processing episode action {action}: {str(e)}") - continue - - cursor.close() - except Exception as e: - logger.error(f"Error fetching episode actions: {str(e)}") - raise - -def sync_nextcloud_episode_times(gpodder_url, gpodder_login, gpodder_token, cnx, database_type, user_id, UPLOAD_BULK_SIZE=30): - logger = logging.getLogger(__name__) - - try: - local_episode_times = get_local_episode_times(cnx, database_type, 
user_id) - update_actions = [] - - for episode_time in local_episode_times: - # Only include episodes with valid duration data - if episode_time["episode_duration"] and episode_time["listen_duration"]: - # If episode is completed, set position equal to total duration - position = (episode_time["episode_duration"] - if episode_time["completed"] - else episode_time["listen_duration"]) - - action = { - "podcast": episode_time["podcast_url"], - "episode": episode_time["episode_url"], - "action": "play", - "timestamp": current_timestamp(), - "position": position, - "started": 0, - "total": episode_time["episode_duration"], - "guid": generate_guid(episode_time) - } - update_actions.append(action) - - # Split into chunks and process - update_actions_chunks = [ - update_actions[i:i + UPLOAD_BULK_SIZE] - for i in range(0, len(update_actions), UPLOAD_BULK_SIZE) - ] - - from urllib.parse import urljoin - for chunk in update_actions_chunks: - try: - url = urljoin(gpodder_url, "/index.php/apps/gpoddersync/episode_action/create") - response = requests.post( - url, - json=chunk, - auth=HTTPBasicAuth(gpodder_login, gpodder_token), - headers={"Accept": "application/json"} - ) - response.raise_for_status() - logger.info(f"Successfully uploaded chunk of {len(chunk)} episode times") - except Exception as e: - logger.error(f"Error uploading chunk: {str(e)}") - continue - - except Exception as e: - logger.error(f"Error syncing local episode times: {str(e)}") - raise - -def get_user_devices(cnx, database_type, user_id): - """Get all GPodder devices for a user with proper datetime conversion""" - import logging - logger = logging.getLogger(__name__) - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = ''' - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive, IsDefault - FROM "GpodderDevices" - WHERE UserID = %s - ''' - else: - query = ''' - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive, IsDefault - FROM 
GpodderDevices - WHERE UserID = %s - ''' - cursor.execute(query, (user_id,)) - devices = [] - for row in cursor.fetchall(): - if isinstance(row, dict): - # Handle dict-style result (depends on the driver) - # Convert datetime to string - last_sync = row["lastsync"].isoformat() if row["lastsync"] else None - device = { - "id": row["deviceid"], - "name": row["devicename"], - "type": row["devicetype"], - "caption": row["devicecaption"], - "last_sync": last_sync, - "is_active": row["isactive"], - "is_remote": False, - "is_default": row["isdefault"] - } - else: - # Handle tuple-style result - # Convert datetime to string - last_sync = row[4].isoformat() if row[4] else None - device = { - "id": row[0], - "name": row[1], - "type": row[2], - "caption": row[3], - "last_sync": last_sync, - "is_active": row[5], - "is_remote": False, - "is_default": row[6] if len(row) > 6 else False - } - devices.append(device) - return devices - except Exception as e: - logger.error(f"Error getting user devices: {e}") - return [] - finally: - cursor.close() - -# Add this to your database_functions/functions.py file - -def handle_remote_device(cnx, database_type, user_id, device_name): - """ - Handles setting a remote device (with negative ID) as default by creating - a local representation or using an existing one. 
- - Args: - cnx: Database connection - database_type: Type of database ('postgresql' or other) - user_id: User ID - device_name: Name of the remote device - - Returns: - tuple: (success: bool, message: str, device_id: int) - """ - import logging - logger = logging.getLogger(__name__) - - try: - # First check if device exists - if so, set it as default - existing_id = find_device_by_name(cnx, database_type, user_id, device_name) - - if existing_id: - # Device exists, set it as default - logger.info(f"Found existing device with name {device_name}, ID: {existing_id}") - success = set_default_gpodder_device(cnx, database_type, user_id, existing_id) - return (success, "Existing device set as default", existing_id) - - # Create new device - new_device_id = create_or_update_device( - cnx, - database_type, - user_id, - device_name, - "remote", # Type for remote devices - f"Remote device from GPodder server" - ) - - if not new_device_id: - logger.error("Failed to create device for remote device") - return (False, "Failed to create local representation of remote device", None) - - # Set as default - success = set_default_gpodder_device(cnx, database_type, user_id, new_device_id) - return (success, "Remote device created and set as default", new_device_id) - - except Exception as e: - logger.error(f"Error handling remote device: {e}") - return (False, f"Error: {str(e)}", None) - - -def find_device_by_name(cnx, database_type, user_id, device_name): - """ - Find a device by name for a specific user - - Args: - cnx: Database connection - database_type: Type of database - user_id: User ID - device_name: Device name to find - - Returns: - int: Device ID or None if not found - """ - try: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT DeviceID FROM "GpodderDevices" WHERE UserID = %s AND DeviceName = %s' - else: - query = 'SELECT DeviceID FROM GpodderDevices WHERE UserID = %s AND DeviceName = %s' - - cursor.execute(query, (user_id, device_name)) - 
result = cursor.fetchone() - - if result: - if isinstance(result, tuple): - return result[0] - else: - return result["deviceid"] - return None - except Exception as e: - print(f"Error finding device by name: {e}") - return None - finally: - cursor.close() - -def create_or_update_device(cnx, database_type, user_id, device_name, device_type="desktop", device_caption=None, is_default=False): - """ - Creates a new device or updates an existing one. - If is_default is True, this device will be set as the default. - """ - try: - cursor = cnx.cursor() - - # Check if device exists - if database_type == "postgresql": - query = """ - SELECT DeviceID FROM "GpodderDevices" - WHERE UserID = %s AND DeviceName = %s - """ - else: - query = """ - SELECT DeviceID FROM GpodderDevices - WHERE UserID = %s AND DeviceName = %s - """ - - cursor.execute(query, (user_id, device_name)) - result = cursor.fetchone() - - if result: - # Device exists, update it - device_id = result[0] if isinstance(result, tuple) else result["deviceid"] - - if database_type == "postgresql": - query = """ - UPDATE "GpodderDevices" - SET DeviceType = %s, DeviceCaption = %s, LastSync = CURRENT_TIMESTAMP - WHERE DeviceID = %s - """ - else: - query = """ - UPDATE GpodderDevices - SET DeviceType = %s, DeviceCaption = %s, LastSync = CURRENT_TIMESTAMP - WHERE DeviceID = %s - """ - - cursor.execute(query, (device_type, device_caption, device_id)) - - # If this should be the default device, set it - if is_default: - set_default_gpodder_device(cnx, database_type, user_id, device_id) - - cnx.commit() - return device_id - else: - # Device doesn't exist, create it - if database_type == "postgresql": - query = """ - INSERT INTO "GpodderDevices" (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault) - VALUES (%s, %s, %s, %s, %s) - RETURNING DeviceID - """ - else: - query = """ - INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault) - VALUES (%s, %s, %s, %s, %s) - """ - - # If this is the 
first device for the user, make it the default - if is_default: - cursor.execute(query, (user_id, device_name, device_type, device_caption, True)) - else: - # Check if this is the first device - if database_type == "postgresql": - count_query = 'SELECT COUNT(*) as count FROM "GpodderDevices" WHERE UserID = %s' - else: - count_query = 'SELECT COUNT(*) as count FROM GpodderDevices WHERE UserID = %s' - - cursor.execute(count_query, (user_id,)) - result = cursor.fetchone() - - # Handle different result formats from different database types - if result is None: - count = 0 - elif isinstance(result, tuple): - count = result[0] - elif isinstance(result, dict) and "count" in result: - count = result["count"] - else: - # Try to get value safely - try: - count = list(result.values())[0] if result else 0 - except: - count = 0 - - # If this is the first device, make it the default - is_first_device = count == 0 - cursor.execute(query, (user_id, device_name, device_type, device_caption, is_first_device)) - - if database_type == "postgresql": - result = cursor.fetchone() - device_id = result[0] if result and isinstance(result, tuple) else (result['deviceid'] if result else None) - else: - device_id = cursor.lastrowid - - cnx.commit() - return device_id - except Exception as e: - print(f"Error creating/updating device: {e}") - cnx.rollback() - return None - finally: - cursor.close() - -def get_sync_timestamps(cnx, database_type, user_id, device_id): - """Get sync timestamps for a device, with default values if not found""" - try: - cursor = cnx.cursor() - # Handle negative device IDs (remote devices) - if device_id and device_id < 0: - print(f"Error getting sync timestamps: Device ID {device_id} is negative (remote device)") - # Return default timestamps for remote devices - return {"last_timestamp": 0, "episodes_timestamp": 0} - if database_type == "postgresql": - query = ''' - SELECT LastTimestamp, EpisodesTimestamp - FROM "GpodderSyncState" - WHERE UserID = %s AND DeviceID = 
%s - ''' - else: - query = ''' - SELECT LastTimestamp, EpisodesTimestamp - FROM GpodderSyncState - WHERE UserID = %s AND DeviceID = %s - ''' - cursor.execute(query, (user_id, device_id)) - result = cursor.fetchone() - if result: - if isinstance(result, tuple): - return { - "last_timestamp": result[0] or 0, - "episodes_timestamp": result[1] or 0 - } - else: - return { - "last_timestamp": result.get("lasttimestamp", 0) or 0, - "episodes_timestamp": result.get("episodestimestamp", 0) or 0 - } - else: - # No timestamps found, create default record - if database_type == "postgresql": - insert_query = ''' - INSERT INTO "GpodderSyncState" (UserID, DeviceID, LastTimestamp, EpisodesTimestamp) - VALUES (%s, %s, 0, 0) - ON CONFLICT (UserID, DeviceID) DO NOTHING - ''' - else: - # For MySQL, use INSERT IGNORE instead of ON CONFLICT - insert_query = ''' - INSERT IGNORE INTO GpodderSyncState (UserID, DeviceID, LastTimestamp, EpisodesTimestamp) - VALUES (%s, %s, 0, 0) - ''' - try: - cursor.execute(insert_query, (user_id, device_id)) - cnx.commit() - except Exception as e: - print(f"Error creating sync timestamps: {e}") - # Don't let this error abort everything - cnx.rollback() - return {"last_timestamp": 0, "episodes_timestamp": 0} - except Exception as e: - print(f"Error getting sync timestamps: {e}") - return {"last_timestamp": 0, "episodes_timestamp": 0} - finally: - cursor.close() - -def update_sync_timestamp(cnx, database_type, user_id, device_id, timestamp_type, new_timestamp): - """Update the sync timestamp for a particular user and device""" - if timestamp_type not in ["last_timestamp", "episodes_timestamp"]: - raise ValueError("Invalid timestamp_type. 
Must be 'last_timestamp' or 'episodes_timestamp'") - - cursor = cnx.cursor() - try: - db_column = "LastTimestamp" if timestamp_type == "last_timestamp" else "EpisodesTimestamp" - - if database_type == "postgresql": - query = f''' - UPDATE "GpodderSyncState" - SET {db_column} = %s - WHERE UserID = %s AND DeviceID = %s - ''' - else: - query = f''' - UPDATE GpodderSyncState - SET {db_column} = %s - WHERE UserID = %s AND DeviceID = %s - ''' - - cursor.execute(query, (new_timestamp, user_id, device_id)) - cnx.commit() - return True - except Exception as e: - print(f"Error updating sync timestamp: {e}") - cnx.rollback() - return False - finally: - cursor.close() - -def get_or_create_default_device(cnx, database_type, user_id): - """Get the default device for a user or create it if it doesn't exist""" - default_device_name = "pinepods_default" - - # Try to find existing default device - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = ''' - SELECT DeviceID FROM "GpodderDevices" - WHERE UserID = %s AND DeviceName = %s - ''' - else: - query = ''' - SELECT DeviceID FROM GpodderDevices - WHERE UserID = %s AND DeviceName = %s - ''' - - cursor.execute(query, (user_id, default_device_name)) - result = cursor.fetchone() - - if result: - # Default device exists - return result[0] if isinstance(result, tuple) else result["deviceid"] - else: - # Create default device - return create_or_update_device( - cnx, - database_type, - user_id, - default_device_name, - "desktop", - "Pinepods Default Device" - ) - except Exception as e: - logger.error(f"Error getting/creating default device: {e}") - return None - finally: - cursor.close() - -def get_current_timestamp(): - """Get current timestamp in format expected by gpodder API""" - return int(time.time()) - - -def create_or_get_gpodder_device(cnx, database_type, user_id, device_name, device_type, device_caption): - """ - Create a gpodder device if it doesn't exist, or get its ID if it does - - Args: - cnx: Database 
connection - database_type: Type of database (postgresql or mysql) - user_id: User ID - device_name: Device name - device_type: Device type (server, desktop, mobile, etc.) - device_caption: Human-readable device caption - - Returns: - Device ID if successful, None if failed - """ - try: - cursor = cnx.cursor() - - # Check if device exists - if database_type == "postgresql": - query = 'SELECT DeviceID FROM "GpodderDevices" WHERE UserID = %s AND DeviceName = %s' - else: - query = "SELECT DeviceID FROM GpodderDevices WHERE UserID = %s AND DeviceName = %s" - - cursor.execute(query, (user_id, device_name)) - device_result = cursor.fetchone() - - if device_result: - # Device exists, return its ID - if isinstance(device_result, tuple): - device_id = device_result[0] - else: - # For dict result, use the correct column name case - device_id = device_result["DeviceID"] - print(f"Using existing gpodder device with ID: {device_id}") - else: - # Create device record - if database_type == "postgresql": - query = ''' - INSERT INTO "GpodderDevices" - (UserID, DeviceName, DeviceType, DeviceCaption, IsActive, LastSync) - VALUES (%s, %s, %s, %s, TRUE, CURRENT_TIMESTAMP) - RETURNING DeviceID - ''' - else: - query = ''' - INSERT INTO GpodderDevices - (UserID, DeviceName, DeviceType, DeviceCaption, IsActive, LastSync) - VALUES (%s, %s, %s, %s, TRUE, NOW()) - ''' - - cursor.execute(query, (user_id, device_name, device_type, device_caption)) - - if database_type == "postgresql": - device_id = cursor.fetchone()[0] - else: - device_id = cursor.lastrowid - - print(f"Created gpodder device with ID: {device_id}") - - # Also create device sync state entry - if database_type == "postgresql": - state_query = ''' - INSERT INTO "GpodderSyncDeviceState" (UserID, DeviceID) - VALUES (%s, %s) - ON CONFLICT (UserID, DeviceID) DO NOTHING - ''' - else: - state_query = ''' - INSERT IGNORE INTO GpodderSyncDeviceState (UserID, DeviceID) - VALUES (%s, %s) - ''' - - cursor.execute(state_query, (user_id, 
device_id)) - - cnx.commit() - cursor.close() - return device_id - - except Exception as e: - print(f"Error in create_or_get_gpodder_device: {e}") - if 'cursor' in locals(): - cursor.close() - return None - -def generate_secure_token(length=64): - """ - Generate a secure random token for internal authentication - - Args: - length: Length of the token (default: 64) - - Returns: - Secure random token string - """ - import secrets - import string - - alphabet = string.ascii_letters + string.digits - return ''.join(secrets.choice(alphabet) for _ in range(length)) - -def set_gpodder_internal_sync(cnx, database_type, user_id): - """ - Set up internal gpodder sync for a user with a plain, unencrypted token - """ - try: - # Get the username - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT Username, Pod_Sync_Type FROM "Users" WHERE UserID = %s' - else: - query = "SELECT Username, Pod_Sync_Type FROM Users WHERE UserID = %s" - cursor.execute(query, (user_id,)) - user_info = cursor.fetchone() - cursor.close() - if not user_info: - print(f"User not found for ID: {user_id}") - return None - username = user_info[0] if isinstance(user_info, tuple) else user_info["username"] - current_sync_type = user_info[1] if isinstance(user_info, tuple) else user_info["pod_sync_type"] - - # Generate a new sync type based on current - new_sync_type = current_sync_type - if current_sync_type == "external": - new_sync_type = "both" - elif current_sync_type == "None" or current_sync_type is None: - new_sync_type = "gpodder" - - # Generate a secure internal token - PLAIN TEXT, NO ENCRYPTION - import secrets - import string - alphabet = string.ascii_letters + string.digits - internal_token = ''.join(secrets.choice(alphabet) for _ in range(64)) - - # Set up the local gpodder API details - local_gpodder_url = "http://localhost:8042" # Internal API URL - - # Store the plain token in the database - if database_type == "postgresql": - query = ''' - UPDATE "Users" - SET 
GpodderUrl = %s, GpodderToken = %s, GpodderLoginName = %s, Pod_Sync_Type = %s - WHERE UserID = %s - ''' - else: - query = ''' - UPDATE Users - SET GpodderUrl = %s, GpodderToken = %s, GpodderLoginName = %s, Pod_Sync_Type = %s - WHERE UserID = %s - ''' - cursor = cnx.cursor() - cursor.execute(query, (local_gpodder_url, internal_token, username, new_sync_type, user_id)) - cnx.commit() - cursor.close() - - # Create a default device for this user using the gPodder API - default_device_name = f"pinepods-internal-{user_id}" - - # Create the device using the gPodder API - import requests - from requests.auth import HTTPBasicAuth - - # Use the API to register a device - device_data = { - "caption": f"PinePods Internal Device {user_id}", - "type": "server" - } - - try: - # First, check if the device already exists - device_list_url = f"{local_gpodder_url}/api/2/devices/{username}.json" - response = requests.get( - device_list_url, - auth=HTTPBasicAuth(username, internal_token) - ) - - # If we can't get device list, create a new one anyway - existing_device_id = None - if response.status_code == 200: - devices = response.json() - for device in devices: - if device.get("id") == default_device_name: - existing_device_id = device.get("id") - print(f"Found existing device with ID: {existing_device_id}") - break - - # If device doesn't exist, create it - if not existing_device_id: - device_url = f"{local_gpodder_url}/api/2/devices/{username}/{default_device_name}.json" - response = requests.post( - device_url, - json=device_data, - auth=HTTPBasicAuth(username, internal_token) - ) - - if response.status_code in [200, 201]: - print(f"Created device with ID: {default_device_name}") - else: - print(f"Failed to create device: {response.status_code} - {response.text}") - # Continue anyway - the API might create the device on first sync - - # Return the device info - return { - "device_name": default_device_name, - "device_id": user_id, # Use user_id as a fallback/reference - "success": 
True - } - - except Exception as device_err: - print(f"Error creating device via API: {device_err}") - # Even if device creation fails, still return success - return { - "device_name": default_device_name, - "device_id": user_id, - "success": True - } - - except Exception as e: - print(f"Error in set_gpodder_internal_sync: {e}") - return None - -def disable_gpodder_internal_sync(cnx, database_type, user_id): - """ - Disable internal gpodder sync for a user - - Args: - cnx: Database connection - database_type: Type of database (postgresql or mysql) - user_id: User ID - - Returns: - True if successful, False if failed - """ - try: - # Get current gpodder settings - user_data = get_user_gpodder_status(cnx, database_type, user_id) - if not user_data: - print(f"User data not found for ID: {user_id}") - return False - - current_sync_type = user_data["sync_type"] - - # Determine new sync type - new_sync_type = current_sync_type - if current_sync_type == "both": - new_sync_type = "external" - elif current_sync_type == "gpodder": - new_sync_type = "None" - - # If internal API is being used, clear the settings - if user_data.get("gpodder_url") == "http://localhost:8042": - success = add_gpodder_settings( - database_type, - cnx, - user_id, - "", # Clear URL - "", # Clear token - "", # Clear login - new_sync_type - ) - - if not success: - print(f"Failed to clear gpodder settings for user: {user_id}") - return False - else: - # Just update the sync type - success = update_user_gpodder_sync(cnx, database_type, user_id, new_sync_type) - if not success: - print(f"Failed to update gpodder sync type for user: {user_id}") - return False - - return True - - except Exception as e: - print(f"Error in disable_gpodder_internal_sync: {e}") - return False - -def refresh_gpodder_subscription(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token, - gpodder_login, pod_sync_type, device_id=None, device_name=None, is_remote=False): - """Refreshes podcasts from GPodder with proper 
device handling""" - from cryptography.fernet import Fernet - import logging - import requests - import base64 - from requests.auth import HTTPBasicAuth - - # Set up logging - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - - try: - # More detailed logging for debugging - print(f"Starting refresh with parameters: user_id={user_id}, gpodder_url={gpodder_url}, " + - f"pod_sync_type={pod_sync_type}, device_id={device_id}, device_name={device_name}, " + - f"is_remote={is_remote}") - - # Flag to identify internal API calls - is_internal_api = (gpodder_url == "http://localhost:8042") - print(f"Is internal API: {is_internal_api}") - - # Determine which device to use for GPodder API calls - actual_device_name = None - - # Handle device name/id logic - if is_remote and device_name: - # If it's a remote device, use the provided device name directly - print(f"Using remote device name: {device_name}") - - # Create a local representation of the remote device - success, message, local_device_id = handle_remote_device(cnx, database_type, user_id, device_name) - if success: - print(f"Created/found local device for remote device: {local_device_id}") - # Use the local device ID instead of -1 - device_id = local_device_id - actual_device_name = device_name - else: - print(f"Failed to handle remote device: {message}") - # Proceed with just the name, but device_id will still be -1 which might cause problems - actual_device_name = device_name - elif device_id: - # If a specific device ID is provided, look it up in the database - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s' - else: - query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s" - cursor.execute(query, (device_id,)) - result = cursor.fetchone() - cursor.close() - - if result: - actual_device_name = result[0] if isinstance(result, tuple) else result["devicename"] - logger.info(f"Using device from 
database: {actual_device_name} (ID: {device_id})") - else: - logger.warning(f"Device ID {device_id} not found in database, falling back to default") - default_device = get_default_gpodder_device(cnx, database_type, user_id) - if default_device: - device_id = default_device["id"] - actual_device_name = default_device["name"] - print(f"Using default device: {actual_device_name} (ID: {device_id})") - else: - # No default device, create one with proper naming pattern - if is_internal_api: - # Use internal device naming pattern - device_name_to_use = f"pinepods-internal-{user_id}" - device_type_to_use = "server" - device_caption_to_use = f"PinePods Internal Device {user_id}" - else: - # Use default device pattern for external gPodder - device_name_to_use = "pinepods_default" - device_type_to_use = "desktop" - device_caption_to_use = "Pinepods Default Device" - - device_id = create_or_update_device( - cnx, - database_type, - user_id, - device_name_to_use, - device_type_to_use, - device_caption_to_use, - True # Set as default - ) - actual_device_name = device_name_to_use - print(f"Created new default device: {actual_device_name} (ID: {device_id})") - else: - # No device specified, use default - default_device = get_default_gpodder_device(cnx, database_type, user_id) - if default_device: - device_id = default_device["id"] - actual_device_name = default_device["name"] - print(f"Using default device: {actual_device_name} (ID: {device_id})") - else: - # No devices exist, create a default one with proper naming pattern - if is_internal_api: - # Use internal device naming pattern - device_name_to_use = f"pinepods-internal-{user_id}" - device_type_to_use = "server" - device_caption_to_use = f"PinePods Internal Device {user_id}" - else: - # Use default device pattern for external gPodder - device_name_to_use = "pinepods_default" - device_type_to_use = "desktop" - device_caption_to_use = "Pinepods Default Device" - - device_id = create_or_update_device( - cnx, - database_type, - 
user_id, - device_name_to_use, - device_type_to_use, - device_caption_to_use, - True # Set as default - ) - actual_device_name = device_name_to_use - print(f"Created new default device: {actual_device_name} (ID: {device_id})") - - # For remote devices, we might need to skip checking local timestamps - # and force a full sync from the GPodder server - if is_remote: - # Force a full sync by setting timestamp to 0 - timestamps = {"last_timestamp": 0} - print("Remote device selected - forcing full sync with timestamp 0") - else: - # Get sync timestamps for local device - timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id) - - # Get encryption key and decrypt the GPodder token - print("Getting encryption key...") - encryption_key = get_encryption_key(cnx, database_type) - - if not encryption_key: - logger.error("Failed to retrieve encryption key") - return False - - try: - encryption_key_bytes = base64.b64decode(encryption_key) - cipher_suite = Fernet(encryption_key_bytes) - except Exception as e: - logger.error(f"Error preparing encryption key: {str(e)}") - return False - - # Special handling for encrypted_gpodder_token based on input type - if isinstance(encrypted_gpodder_token, dict): - if "data" in encrypted_gpodder_token: - print("Extracting token from dictionary input") - encrypted_gpodder_token = encrypted_gpodder_token.get("data", {}).get("gpoddertoken", "") - else: - encrypted_gpodder_token = encrypted_gpodder_token.get("gpoddertoken", "") - - # Decrypt the token - with improved error handling - gpodder_token = None - if encrypted_gpodder_token is not None and encrypted_gpodder_token != "": - try: - # Handle both string and bytes formats - if isinstance(encrypted_gpodder_token, bytes): - decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token) - else: - # Make sure we're working with a valid token - token_to_decrypt = encrypted_gpodder_token - # If the token isn't in the right format for decryption, try to fix it - if not 
(token_to_decrypt.startswith(b'gAAAAA') if isinstance(token_to_decrypt, bytes) - else token_to_decrypt.startswith('gAAAAA')): - gpodder_token = encrypted_gpodder_token - else: - decrypted_token_bytes = cipher_suite.decrypt(token_to_decrypt.encode()) - gpodder_token = decrypted_token_bytes.decode() - except Exception as e: - logger.error(f"Error decrypting token: {str(e)}") - # For non-internal servers, we might still want to continue with whatever token we have - if is_internal_api: - # For internal server, fall back to using the raw token if decryption fails - gpodder_token = encrypted_gpodder_token - else: - # For external servers, continue with the encrypted token - gpodder_token = encrypted_gpodder_token - else: - logger.warning("No token provided") - if is_internal_api: - logger.error("Token required for internal gpodder server") - return False - - print(f"Using {'internal' if is_internal_api else 'external'} gpodder API at {gpodder_url}") - - # Create a session for cookie-based auth - session = requests.Session() - - # Handle authentication for internal API calls - if is_internal_api: - print("Using token-based auth for internal API") - # Use the token directly with the gPodder API - auth = HTTPBasicAuth(gpodder_login, encrypted_gpodder_token) - - # Try to access API using Basic Auth - try: - # First, create or update the device if needed - device_data = { - "caption": f"PinePods Internal Device {user_id}", - "type": "server" - } - device_url = f"{gpodder_url}/api/2/devices/{gpodder_login}/{actual_device_name}.json" - - try: - response = requests.post( - device_url, - json=device_data, - auth=auth - ) - if response.status_code in [200, 201]: - print(f"Updated device: {actual_device_name}") - else: - print(f"Note: Device update returned {response.status_code}") - except Exception as device_err: - print(f"Warning: Device update failed: {device_err}") - # Continue anyway - - # Now get subscriptions - subscription_url = 
f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}" - print(f"Requesting subscriptions from internal API at {subscription_url}") - response = requests.get(subscription_url, auth=auth) - response.raise_for_status() - gpodder_data = response.json() - print("Successfully retrieved data from internal API") - use_session = False - except Exception as e: - logger.error(f"Failed to get subscriptions from internal API: {str(e)}") - raise - else: - # For external API, use regular basic auth as before - print("Using regular basic auth for external API") - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - - # Try session-based authentication (for PodFetch) - gpodder_data = None - use_session = False - - try: - # First try to login to establish a session - login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json" - print(f"Trying session-based authentication at {login_url}") - login_response = session.post(login_url, auth=auth) - login_response.raise_for_status() - print("Session login successful") - - # Use the session to get subscriptions with the since parameter - subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}" - response = session.get(subscription_url) - response.raise_for_status() - gpodder_data = response.json() - use_session = True - print("Using session-based authentication") - - except Exception as e: - logger.warning(f"Session-based authentication failed: {str(e)}. 
Falling back to basic auth.") - # Fall back to standard auth if session auth fails - try: - subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}" - print(f"Trying basic authentication at {subscription_url}") - response = requests.get(subscription_url, auth=auth) - response.raise_for_status() - gpodder_data = response.json() - print("Using basic authentication") - except Exception as e2: - logger.error(f"Basic auth also failed: {str(e2)}") - raise - - # Store timestamp for next sync if present - if gpodder_data and "timestamp" in gpodder_data: - update_sync_timestamp(cnx, database_type, user_id, device_id, "last_timestamp", gpodder_data["timestamp"]) - logger.info(f"Stored timestamp: {gpodder_data['timestamp']}") - - # Extract subscription data - gpodder_podcasts_add = gpodder_data.get("add", []) - gpodder_podcasts_remove = gpodder_data.get("remove", []) - - print(f"gPodder podcasts to add: {gpodder_podcasts_add}") - print(f"gPodder podcasts to remove: {gpodder_podcasts_remove}") - - # Get local podcasts - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s' - else: - query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - local_podcasts = set() - for row in cursor.fetchall(): - if isinstance(row, dict): - local_podcasts.add(row["feedurl"]) # PostgreSQL dict case - else: - local_podcasts.add(row[0]) # Tuple case - - podcasts_to_add = set(gpodder_podcasts_add) - local_podcasts - podcasts_to_remove = set(gpodder_podcasts_remove) & local_podcasts - - # Track successful additions and removals for sync - successful_additions = set() - successful_removals = set() - - # Add new podcasts with individual error handling - print("Adding new podcasts...") - for feed_url in podcasts_to_add: - try: - podcast_values = get_podcast_values(feed_url, user_id) - feed_cutoff = 30 - return_value = 
add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff) - if return_value: - print(f"Successfully added {feed_url}") - successful_additions.add(feed_url) - else: - logger.error(f"Failed to add {feed_url}") - except Exception as e: - logger.error(f"Error processing {feed_url}: {str(e)}") - continue # Continue with next podcast even if this one fails - - # Remove podcasts with individual error handling - print("Removing podcasts...") - for feed_url in podcasts_to_remove: - try: - if database_type == "postgresql": - query = 'SELECT PodcastName FROM "Podcasts" WHERE FeedURL = %s' - else: - query = "SELECT PodcastName FROM Podcasts WHERE FeedURL = %s" - - cursor.execute(query, (feed_url,)) - result = cursor.fetchone() - - if result: - podcast_name = result[0] - if remove_podcast(cnx, database_type, podcast_name, feed_url, user_id): - successful_removals.add(feed_url) - print(f"Successfully removed {feed_url}") - else: - logger.error(f"Failed to remove {feed_url}") - else: - logger.warning(f"No podcast found with URL: {feed_url}") - except Exception as e: - logger.error(f"Error removing {feed_url}: {str(e)}") - continue - - cnx.commit() - cursor.close() - - # Process episode actions using the correct device - try: - print(f"Authentication method: {'session' if use_session else 'basic auth'}") - if use_session: - print("Using SESSION authentication for episode actions") - process_episode_actions_session( - session, - gpodder_url, - gpodder_login, - cnx, - database_type, - user_id, - actual_device_name, - device_id - ) - else: - print("Using BASIC authentication for episode actions") - process_episode_actions( - gpodder_url, - gpodder_login, - auth, - cnx, - database_type, - user_id, - actual_device_name, - device_id - ) - except Exception as e: - logger.error(f"Error processing episode actions: {str(e)}") - - # Sync local episode times - try: - if use_session: - sync_local_episode_times_session( - session, - gpodder_url, - gpodder_login, - cnx, - 
database_type, - user_id, - actual_device_name - ) - else: - sync_local_episode_times( - gpodder_url, - gpodder_login, - auth, - cnx, - database_type, - user_id, - actual_device_name - ) - except Exception as e: - logger.error(f"Error syncing local episode times: {str(e)}") - - return True - except Exception as e: - logger.error(f"Major error in refresh_gpodder_subscription: {str(e)}") - return False - -def sync_local_episode_times_session(session, gpodder_url, gpodder_login, cnx, database_type, user_id, device_name=None, UPLOAD_BULK_SIZE=30): - """Sync local episode times using session-based authentication""" - from datetime import datetime - try: - # If no device name is provided, get the user's default device - if not device_name: - default_device = get_default_gpodder_device(cnx, database_type, user_id) - if default_device: - device_name = default_device["name"] - else: - print("WARNING: No devices found for user, episode actions will fail") - return - - # Get local episode times - local_episode_times = get_local_episode_times(cnx, database_type, user_id) - - # Skip if no episodes to sync - if not local_episode_times: - print("No episodes to sync") - return - - # Format actions with all the required fields - actions = [] - - # Format timestamp as ISO string - current_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S") - - for episode_time in local_episode_times: - # Only include episodes with valid duration data - if episode_time.get("episode_duration") and episode_time.get("listen_duration"): - if not episode_time.get("podcast_url") or not episode_time.get("episode_url"): - print(f"Skipping episode with missing URL data") - continue - - # If episode is completed, set position to total duration - position = (episode_time["episode_duration"] - if episode_time.get("completed", False) - else episode_time["listen_duration"]) - - # Add all required fields including device - action = { - "podcast": episode_time["podcast_url"], - "episode": 
episode_time["episode_url"], - "action": "play", - "position": int(position), - "total": int(episode_time["episode_duration"]), - "timestamp": current_time, - "device": device_name, - "started": 0 # Required by some implementations - } - - # Add guid if available - if episode_time.get("guid"): - action["guid"] = episode_time["guid"] - - actions.append(action) - - if not actions: - print("No valid actions to send") - return - - print(f"Prepared {len(actions)} actions to send") - print(f"First action device name: {actions[0]['device']}") - - # Split into chunks and process - actions_chunks = [ - actions[i:i + UPLOAD_BULK_SIZE] - for i in range(0, len(actions), UPLOAD_BULK_SIZE) - ] - - for chunk in actions_chunks: - try: - response = session.post( - f"{gpodder_url}/api/2/episodes/{gpodder_login}.json", - json=chunk, # Send as array - headers={"Content-Type": "application/json"} - ) - - if response.status_code < 300: - print(f"Successfully synced {len(chunk)} episode actions") - else: - print(f"Error syncing episode actions: {response.status_code} - {response.text}") - - # Debug the request - print(f"Request URL: {gpodder_url}/api/2/episodes/{gpodder_login}.json") - print(f"Request headers: {session.headers}") - print(f"First few actions in chunk: {chunk[:2]}") - except Exception as e: - print(f"Error sending actions: {str(e)}") - continue - - except Exception as e: - print(f"Error in sync_local_episode_times_session: {str(e)}") - - -def set_default_gpodder_device(cnx, database_type, user_id, device_id): - """ - Sets a device as the user's default GPodder device. - This will unset any previous default device. 
- - Args: - cnx: Database connection - database_type: "postgresql" or "mariadb" - user_id: User ID - device_id: Device ID to set as default - - Returns: - bool: Success or failure - """ - try: - cursor = cnx.cursor() - - # First verify the device exists and belongs to the user - if database_type == "postgresql": - query = 'SELECT DeviceID FROM "GpodderDevices" WHERE DeviceID = %s AND UserID = %s' - else: - query = 'SELECT DeviceID FROM GpodderDevices WHERE DeviceID = %s AND UserID = %s' - - cursor.execute(query, (device_id, user_id)) - if not cursor.fetchone(): - print(f"Device ID {device_id} does not exist or doesn't belong to user {user_id}") - return False - - # Start a transaction - if database_type == "postgresql": - # First, unset the current default device if any - cursor.execute(""" - UPDATE "GpodderDevices" - SET IsDefault = FALSE - WHERE UserID = %s AND IsDefault = TRUE - """, (user_id,)) - - # Then set the new default device - cursor.execute(""" - UPDATE "GpodderDevices" - SET IsDefault = TRUE - WHERE DeviceID = %s - """, (device_id,)) - else: - # First, unset the current default device if any - cursor.execute(""" - UPDATE GpodderDevices - SET IsDefault = FALSE - WHERE UserID = %s AND IsDefault = TRUE - """, (user_id,)) - - # Then set the new default device - cursor.execute(""" - UPDATE GpodderDevices - SET IsDefault = TRUE - WHERE DeviceID = %s - """, (device_id,)) - - cnx.commit() - print(f"Set default GPodder device {device_id} for user {user_id}") - return True - except Exception as e: - print(f"Error setting default GPodder device: {e}") - cnx.rollback() - return False - finally: - cursor.close() - -def get_default_gpodder_device(cnx, database_type, user_id): - """ - Gets the user's default GPodder device. - If no default is set, returns the oldest device. 
- - Args: - cnx: Database connection - database_type: "postgresql" or "mariadb" - user_id: User ID - - Returns: - dict: Device information or None if no devices exist - """ - try: - cursor = cnx.cursor() - - # First try to get the default device - if database_type == "postgresql": - query = """ - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive - FROM "GpodderDevices" - WHERE UserID = %s AND IsDefault = TRUE - LIMIT 1 - """ - else: - query = """ - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive - FROM GpodderDevices - WHERE UserID = %s AND IsDefault = TRUE - LIMIT 1 - """ - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - if result: - # Return the default device - if isinstance(result, dict): - return { - "id": result["deviceid"], - "name": result["devicename"], - "type": result["devicetype"], - "caption": result["devicecaption"], - "last_sync": result["lastsync"], - "is_active": result["isactive"], - "is_remote": False, - "is_default": True - } - else: - return { - "id": result[0], - "name": result[1], - "type": result[2], - "caption": result[3], - "last_sync": result[4], - "is_active": result[5], - "is_remote": False, - "is_default": True - } - - # If no default device is set, get the oldest device - if database_type == "postgresql": - query = """ - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive - FROM "GpodderDevices" - WHERE UserID = %s - ORDER BY DeviceID ASC - LIMIT 1 - """ - else: - query = """ - SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive - FROM GpodderDevices - WHERE UserID = %s - ORDER BY DeviceID ASC - LIMIT 1 - """ - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - if result: - # Return the oldest device - if isinstance(result, dict): - return { - "id": result["deviceid"], - "name": result["devicename"], - "type": result["devicetype"], - "caption": result["devicecaption"], - "last_sync": 
result["lastsync"], - "is_active": result["isactive"], - "is_remote": False, - "is_default": False - } - else: - return { - "id": result[0], - "name": result[1], - "type": result[2], - "caption": result[3], - "last_sync": result[4], - "is_active": result[5], - "is_remote": False, - "is_default": False - } - - # No devices found - return None - except Exception as e: - print(f"Error getting default GPodder device: {e}") - return None - finally: - cursor.close() - - -def sync_local_episode_times(gpodder_url, gpodder_login, auth, cnx, database_type, user_id, device_name="default", UPLOAD_BULK_SIZE=30): - """Sync local episode times using basic authentication""" - import logging - from datetime import datetime - import requests - - logger = logging.getLogger(__name__) - - try: - local_episode_times = get_local_episode_times(cnx, database_type, user_id) - update_actions = [] - - for episode_time in local_episode_times: - # Only include episodes with valid duration data - if episode_time.get("episode_duration") and episode_time.get("listen_duration"): - # If episode is completed, set position to total duration - position = (episode_time["episode_duration"] - if episode_time.get("completed", False) - else episode_time["listen_duration"]) - - action = { - "podcast": episode_time["podcast_url"], - "episode": episode_time["episode_url"], - "action": "play", - "timestamp": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S"), - "position": int(position), - "started": 0, - "total": int(episode_time["episode_duration"]), - "device": device_name # Use the specified device name - } - - # Add guid if available - if episode_time.get("guid"): - action["guid"] = episode_time["guid"] - - update_actions.append(action) - - # Skip if no actions to send - if not update_actions: - logger.info("No episode actions to upload") - return - - # Split into chunks and process - update_actions_chunks = [ - update_actions[i:i + UPLOAD_BULK_SIZE] - for i in range(0, len(update_actions), UPLOAD_BULK_SIZE) 
- ] - - for chunk in update_actions_chunks: - try: - response = requests.post( - f"{gpodder_url}/api/2/episodes/{gpodder_login}.json", - json=chunk, - auth=auth, - headers={"Accept": "application/json", "Content-Type": "application/json"} - ) - response.raise_for_status() - logger.info(f"Successfully synced {len(chunk)} episode actions") - except Exception as e: - logger.error(f"Error uploading chunk: {str(e)}") - continue - - except Exception as e: - logger.error(f"Error syncing local episode times: {str(e)}") - raise - -def process_episode_actions_session(session, gpodder_url, gpodder_login, cnx, database_type, user_id, device_name, device_id): - """Process incoming episode actions from gPodder using session-based authentication""" - logger = logging.getLogger(__name__) - print('running episode actions') - - try: - # Get timestamp for since parameter - timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id) - episodes_timestamp = timestamps["episodes_timestamp"] - print('got timestamps') - - # Get episode actions with session and since parameter - episode_actions_response = session.get( - f"{gpodder_url}/api/2/episodes/{gpodder_login}.json?since={episodes_timestamp}&device={device_name}" - ) - episode_actions_response.raise_for_status() - episode_actions = episode_actions_response.json() - print('got actions') - - # Store timestamp for future requests - if "timestamp" in episode_actions: - update_sync_timestamp(cnx, database_type, user_id, device_id, "episodes_timestamp", episode_actions["timestamp"]) - print('stamp stored') - # Process each action - cursor = cnx.cursor() - for action in episode_actions.get('actions', []): - print('processing') - try: - if action["action"].lower() in ["play", "update_time"]: - if "position" in action and action["position"] != -1: - episode_id = get_episode_id_by_url(cnx, database_type, action["episode"]) - if episode_id: - # Update listen duration - record_listen_duration(cnx, database_type, episode_id, user_id, 
int(action["position"])) - # Check for completion - if ("total" in action and action["total"] > 0 and - action["position"] >= action["total"]): - if database_type == "postgresql": - update_query = ''' - UPDATE "Episodes" - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - else: - update_query = ''' - UPDATE Episodes - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - cursor.execute(update_query, (episode_id,)) - cnx.commit() - print(f"Marked episode {episode_id} as completed") - except Exception as e: - logger.error(f"Error processing episode action {action}: {str(e)}") - continue - cursor.close() - except Exception as e: - logger.error(f"Error fetching episode actions with session: {str(e)}") - raise - -def process_episode_actions(gpodder_url, gpodder_login, auth, cnx, database_type, user_id, device_name, device_id): - """Process incoming episode actions from gPodder using basic authentication""" - logger = logging.getLogger(__name__) - print('Running episode actions with basic auth') - try: - # Get timestamp for since parameter - timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id) - episodes_timestamp = timestamps["episodes_timestamp"] - print(f'Got timestamps: {episodes_timestamp}') - - # Always include device parameter, even if it's empty - url = f"{gpodder_url}/api/2/episodes/{gpodder_login}.json?since={episodes_timestamp}" - if device_name: - url += f"&device={device_name}" - - print(f"Episode actions API URL: {url}") - - # Get episode actions with basic auth - episode_actions_response = requests.get(url, auth=auth) - print(f"Episode actions response status: {episode_actions_response.status_code}") - - # Log the raw response for debugging - response_text = episode_actions_response.text - print(f"Raw response: {response_text[:200]}...") # Log first 200 chars - - episode_actions_response.raise_for_status() - - # Parse the JSON response - episode_actions = episode_actions_response.json() - print(f"Response keys: 
{episode_actions.keys()}") - - # Store timestamp for future requests - if "timestamp" in episode_actions: - update_sync_timestamp(cnx, database_type, user_id, device_id, "episodes_timestamp", episode_actions["timestamp"]) - print(f'Updated timestamp to {episode_actions["timestamp"]}') - - # Check if 'actions' key exists before processing - if 'actions' not in episode_actions: - print("No 'actions' key in response. Response structure: %s", episode_actions) - return # Exit early if no actions to process - - # Process each action - same as in session version - cursor = cnx.cursor() - for action in episode_actions.get('actions', []): - try: - print(f"Processing action: {action}") - - if "action" not in action: - print(f"Action missing 'action' key: {action}") - continue - - if action["action"].lower() in ["play", "update_time"]: - if "position" in action and action["position"] != -1: - # Check if episode key exists - if "episode" not in action: - print(f"Action missing 'episode' key: {action}") - continue - - episode_id = get_episode_id_by_url(cnx, database_type, action["episode"]) - - if not episode_id: - print(f"No episode found for URL: {action['episode']}") - continue - - # Update listen duration - record_listen_duration(cnx, database_type, episode_id, user_id, int(action["position"])) - print(f"Updated listen duration for episode {episode_id}") - - # Check for completion - if ("total" in action and action["total"] > 0 and - action["position"] >= action["total"]): - if database_type == "postgresql": - update_query = ''' - UPDATE "Episodes" - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - else: - update_query = ''' - UPDATE Episodes - SET Completed = TRUE - WHERE EpisodeID = %s - ''' - cursor.execute(update_query, (episode_id,)) - cnx.commit() - print(f"Marked episode {episode_id} as completed") - except Exception as e: - logger.error(f"Error processing episode action {action}: {str(e)}") - # Continue with next action rather than breaking - continue - 
cursor.close() - except Exception as e: - logger.error(f"Error fetching episode actions with basic auth: {str(e)}", exc_info=True) - raise - -def force_full_sync_to_gpodder(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token, gpodder_login, device_id=None, device_name=None, is_remote=False): - """Force a full sync of all local podcasts to the GPodder server""" - from cryptography.fernet import Fernet - from requests.auth import HTTPBasicAuth - import requests - import logging - import base64 # Make sure to import base64 - - print(f"Starting GPodder sync with: device_id={device_id}, device_name={device_name}, is_remote={is_remote}") - - try: - # Check if this is the internal API - is_internal_api = (gpodder_url == "http://localhost:8042") - print(f"Is internal API: {is_internal_api}") - - # Use provided device_id or get/create default - if device_id is None or device_id <= 0: # Handle negative IDs for remote devices - device_id = get_or_create_default_device(cnx, database_type, user_id) - print(f"Using default device with ID: {device_id}") - else: - print(f"Using provided device ID: {device_id}") - - # Use provided device_name or get from database - if device_name is None: - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s' - else: - query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s" - cursor.execute(query, (device_id,)) - result = cursor.fetchone() - if result: - device_name = result[0] if isinstance(result, tuple) else result["devicename"] - print(f"Found device name from database: {device_name}") - else: - # Fallback to default name if query returns nothing - device_name = "pinepods_default" - print(f"No device name found, using default: {device_name}") - cursor.close() - else: - print(f"Using provided device name: {device_name}") - - # Handle token based on whether it's internal or external API - gpodder_token = None - if is_internal_api: - # For 
internal API, use the token directly without decryption - gpodder_token = encrypted_gpodder_token - print("Using raw token for internal API") - else: - # For external API, decrypt the token - try: - # Fetch encryption key - encryption_key = get_encryption_key(cnx, database_type) - encryption_key_bytes = base64.b64decode(encryption_key) - cipher_suite = Fernet(encryption_key_bytes) - - # Decrypt the token - if encrypted_gpodder_token is not None: - decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode()) - gpodder_token = decrypted_token_bytes.decode() - print("Successfully decrypted token for external API") - else: - gpodder_token = None - print("Warning: No GPodder token provided") - except Exception as e: - print(f"Error decrypting token: {str(e)}") - # Use the token as-is if decryption fails - gpodder_token = encrypted_gpodder_token - print("Using encrypted token as fallback due to decryption error") - - # Create auth - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - - # Get all local podcasts - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s' - else: - query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s" - cursor.execute(query, (user_id,)) - - local_podcasts = [] - for row in cursor.fetchall(): - if isinstance(row, dict): - local_podcasts.append(row["feedurl"]) - else: - local_podcasts.append(row[0]) - - print(f"Found {len(local_podcasts)} local podcasts to sync") - - # For internal API, skip session-based login and go straight to basic auth - if is_internal_api: - print("Internal API detected - skipping session login and using basic auth directly") - subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json" - - # Try PUT request first (standard method) - try: - print(f"Sending PUT request with basic auth to: {subscription_url}") - - response = requests.put( - subscription_url, - json=local_podcasts, - auth=auth, - 
headers={"Content-Type": "application/json"} - ) - - print(f"PUT response status: {response.status_code}") - response.raise_for_status() - print("Successfully pushed all podcasts to internal GPodder API") - return True - except Exception as e: - print(f"PUT request failed: {str(e)}") - - # Fall back to POST with update format - try: - print("Trying POST with update format...") - payload = { - "add": local_podcasts, - "remove": [] - } - - response = requests.post( - subscription_url, - json=payload, - auth=auth, - headers={"Content-Type": "application/json"} - ) - - response.raise_for_status() - print("Successfully updated podcasts using POST method") - return True - except Exception as e2: - print(f"POST request failed: {str(e2)}") - return False - else: - # For external API, try session login first - try: - # Try to login first to establish a session - session = requests.Session() - login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json" - print(f"Logging in to external GPodder at: {login_url}") - login_response = session.post(login_url, auth=auth) - login_response.raise_for_status() - print("Session login successful for full sync") - - # Use PUT request to update subscriptions - subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json" - print(f"Sending PUT request to: {subscription_url}") - - # Debug the payload - print(f"Sending payload: {local_podcasts[:3]}... (showing first 3 of {len(local_podcasts)})") - - response = session.put( - subscription_url, - json=local_podcasts, - headers={"Content-Type": "application/json"} - ) - - # Check response - print(f"PUT response status: {response.status_code}") - print(f"PUT response text: {response.text[:200]}...") # Show first 200 chars - - response.raise_for_status() - print(f"Successfully pushed all podcasts to GPodder") - return True - - except Exception as e: - print(f"Session-based sync failed: {str(e)}. 
Falling back to basic auth.") - try: - # Try a different method - POST with the update API - try: - print("Trying POST to subscriptions-update API...") - update_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json" - payload = { - "add": local_podcasts, - "remove": [] - } - response = requests.post( - update_url, - json=payload, - auth=auth, - headers={"Content-Type": "application/json"} - ) - response.raise_for_status() - print(f"Successfully updated podcasts using POST method") - return True - except Exception as e3: - print(f"Failed with POST method: {str(e3)}") - - # Fall back to basic auth with PUT - print("Falling back to basic auth with PUT...") - subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json" - response = requests.put( - subscription_url, - json=local_podcasts, - auth=auth, - headers={"Content-Type": "application/json"} - ) - - # Check response - print(f"Basic auth PUT response status: {response.status_code}") - print(f"Basic auth PUT response text: {response.text[:200]}...") # Show first 200 chars - - response.raise_for_status() - print(f"Successfully pushed all podcasts to GPodder using basic auth") - return True - except Exception as e2: - print(f"Failed to push podcasts with basic auth: {str(e2)}") - return False - - except Exception as e: - print(f"Error in force_full_sync_to_gpodder: {str(e)}") - return False - -def sync_subscription_change_gpodder_with_device(gpodder_url, gpodder_login, auth, device_name, add=None, remove=None): - """Sync subscription changes using device name""" - import requests - import logging - - logger = logging.getLogger(__name__) - - add = add or [] - remove = remove or [] - - payload = { - "add": add, - "remove": remove - } - - try: - response = requests.post( - f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json", - json=payload, - auth=auth - ) - response.raise_for_status() - logger.info(f"Subscription changes synced with gPodder: 
{response.text}") - return response.json() - except Exception as e: - logger.error(f"Error syncing subscription changes: {str(e)}") - return None - -def sync_subscription_change_gpodder_session_with_device(session, gpodder_url, gpodder_login, device_name, add=None, remove=None): - """Sync subscription changes using session-based authentication with device name""" - import logging - - logger = logging.getLogger(__name__) - - add = add or [] - remove = remove or [] - - payload = { - "add": add, - "remove": remove - } - - try: - response = session.post( - f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json", - json=payload, - headers={"Content-Type": "application/json"} - ) - response.raise_for_status() - logger.info(f"Subscription changes synced with gPodder using session: {response.text}") - return response.json() - except Exception as e: - logger.error(f"Error syncing subscription changes with session: {str(e)}") - return None - -def subscribe_to_person(cnx, database_type, user_id: int, person_id: int, person_name: str, person_img: str, podcast_id: int) -> tuple[bool, int]: - cursor = cnx.cursor() - try: - print(f"Starting subscribe_to_person with: user_id={user_id}, person_id={person_id}, person_name={person_name}, podcast_id={podcast_id}") - - if database_type == "postgresql": - # Check if a person with the same PeopleDBID (if not 0) or Name (if PeopleDBID is 0) exists - if person_id != 0: - query = """ - SELECT PersonID, AssociatedPodcasts FROM "People" - WHERE UserID = %s AND PeopleDBID = %s - """ - cursor.execute(query, (user_id, person_id)) - else: - query = """ - SELECT PersonID, AssociatedPodcasts FROM "People" - WHERE UserID = %s AND Name = %s AND PeopleDBID = 0 - """ - cursor.execute(query, (user_id, person_name)) - - existing_person = cursor.fetchone() - print(f"Query result: {existing_person}") - - if existing_person: - print("Found existing person, updating...") - # Person exists, update AssociatedPodcasts and possibly update 
image/description - person_id, associated_podcasts = existing_person - podcast_list = associated_podcasts.split(',') if associated_podcasts else [] - if str(podcast_id) not in podcast_list: - podcast_list.append(str(podcast_id)) - new_associated_podcasts = ','.join(podcast_list) - update_query = """ - UPDATE "People" - SET AssociatedPodcasts = %s, - PersonImg = COALESCE(%s, PersonImg) - WHERE PersonID = %s - """ - print(f"Executing update query: {update_query} with params: ({new_associated_podcasts}, {person_img}, {person_id})") - cursor.execute(update_query, (new_associated_podcasts, person_img, person_id)) - return True, person_id - else: - print("No existing person found, inserting new record...") - # Person doesn't exist, insert new record with image and description - insert_query = """ - INSERT INTO "People" - (UserID, PeopleDBID, Name, PersonImg, AssociatedPodcasts) - VALUES (%s, %s, %s, %s, %s) - RETURNING PersonID; - """ - print(f"Executing insert query: {insert_query} with params: ({user_id}, {person_id}, {person_name}, {person_img}, {str(podcast_id)})") - cursor.execute(insert_query, (user_id, person_id, person_name, person_img, str(podcast_id))) - result = cursor.fetchone() - print(f"Insert result: {result}") - if result is not None: - # Handle both tuple and dict return types - if isinstance(result, dict): - new_person_id = result['personid'] - else: - new_person_id = result[0] - print(f"Insert successful, new PersonID: {new_person_id}") - cnx.commit() - return True, new_person_id - else: - print("Insert did not return a PersonID") - cnx.rollback() - return False, 0 - - else: # MariaDB - # Check if person exists - if person_id != 0: - query = """ - SELECT PersonID, AssociatedPodcasts FROM People - WHERE UserID = %s AND PeopleDBID = %s - """ - cursor.execute(query, (user_id, person_id)) - else: - query = """ - SELECT PersonID, AssociatedPodcasts FROM People - WHERE UserID = %s AND Name = %s AND PeopleDBID = 0 - """ - cursor.execute(query, (user_id, 
person_name)) - - existing_person = cursor.fetchone() - print(f"Query result: {existing_person}") - - if existing_person: - print("Found existing person, updating...") - # Person exists, update AssociatedPodcasts - person_id = existing_person[0] # MariaDB returns tuple - associated_podcasts = existing_person[1] - podcast_list = associated_podcasts.split(',') if associated_podcasts else [] - - if str(podcast_id) not in podcast_list: - podcast_list.append(str(podcast_id)) - new_associated_podcasts = ','.join(podcast_list) - - update_query = """ - UPDATE People - SET AssociatedPodcasts = %s, - PersonImg = COALESCE(%s, PersonImg) - WHERE PersonID = %s - """ - print(f"Executing update query: {update_query} with params: ({new_associated_podcasts}, {person_img}, {person_id})") - cursor.execute(update_query, (new_associated_podcasts, person_img, person_id)) - cnx.commit() - return True, person_id - else: - print("No existing person found, inserting new record...") - # Person doesn't exist, insert new record - insert_query = """ - INSERT INTO People - (UserID, PeopleDBID, Name, PersonImg, AssociatedPodcasts) - VALUES (%s, %s, %s, %s, %s) - """ - print(f"Executing insert query: {insert_query} with params: ({user_id}, {person_id}, {person_name}, {person_img}, {str(podcast_id)})") - cursor.execute(insert_query, (user_id, person_id, person_name, person_img, str(podcast_id))) - cnx.commit() - - # Get the inserted ID - new_person_id = cursor.lastrowid - print(f"Insert successful, new PersonID: {new_person_id}") - - if new_person_id: - return True, new_person_id - else: - print("Insert did not return a PersonID") - cnx.rollback() - return False, 0 - - except Exception as e: - print(f"Detailed error in subscribe_to_person: {str(e)}\nType: {type(e)}") - import traceback - print(f"Traceback: {traceback.format_exc()}") - cnx.rollback() - return False, 0 - finally: - cursor.close() - - return False, 0 # In case we somehow get here - -def unsubscribe_from_person(cnx, database_type, 
user_id: int, person_id: int, person_name: str) -> bool: - cursor = cnx.cursor() - try: - print(f"Attempting to unsubscribe user {user_id} from person {person_name} (ID: {person_id})") - if database_type == "postgresql": - # Use PersonID instead of PeopleDBID for looking up the record to delete - person_query = 'SELECT PersonID FROM "People" WHERE UserID = %s AND PersonID = %s' - print(f"Searching for person with query: {person_query} and params: {user_id}, {person_id}") - cursor.execute(person_query, (user_id, person_id)) - - else: - person_query = "SELECT PersonID FROM People WHERE UserID = %s AND PersonID = %s" - cursor.execute(person_query, (user_id, person_id)) - - result = cursor.fetchone() - print(f"Query result: {result}") - if not result: - print(f"No person found for user {user_id} with ID {person_id}") - return False - - # Handle both tuple and dict return types - # Handle both tuple and dict return types - if isinstance(result, dict): - person_db_id = result['personid'] - else: - person_db_id = result[0] - print(f"Found PersonID: {person_db_id}") - - if database_type == "postgresql": - check_query = 'SELECT COUNT(*) FROM "People" WHERE PersonID = %s' - delete_query = 'DELETE FROM "People" WHERE PersonID = %s' - else: - check_query = "SELECT COUNT(*) FROM People WHERE PersonID = %s" - delete_query = "DELETE FROM People WHERE PersonID = %s" - - # Check subscriber count for both database types - cursor.execute(check_query, (person_id,)) - subscriber_count = cursor.fetchone()[0] - - # Only delete episodes if this is the last subscriber - if subscriber_count <= 1: - if database_type == "postgresql": - episodes_query = 'DELETE FROM "PeopleEpisodes" WHERE PersonID = %s' - else: - episodes_query = "DELETE FROM PeopleEpisodes WHERE PersonID = %s" - - print(f"Deleting episodes for PersonID {person_db_id}") - cursor.execute(episodes_query, (person_db_id,)) - episode_count = cursor.rowcount - print(f"Deleted {episode_count} episodes") - - # Always delete the person 
record for this user - print(f"Deleting person record for PersonID {person_db_id}") - cursor.execute(delete_query, (person_db_id,)) - person_count = cursor.rowcount - print(f"Deleted {person_count} person records") - - cnx.commit() - return True - - except Exception as e: - print(f"Error unsubscribing from person: {str(e)}") - print(f"Error type: {type(e)}") - if hasattr(e, '__cause__'): - print(f"Cause: {e.__cause__}") - cnx.rollback() - return False - finally: - cursor.close() - -def get_person_subscriptions(cnx, database_type, user_id: int) -> List[dict]: - try: - if database_type == "postgresql": - cursor = cnx.cursor(row_factory=dict_row) - query = 'SELECT * FROM "People" WHERE UserID = %s' - else: # MySQL or MariaDB - cursor = cnx.cursor(dictionary=True) - query = "SELECT * FROM People WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - result = cursor.fetchall() - print(f"Raw database result: {result}") - - formatted_result = [] - for row in result: - if database_type == "postgresql": - # PostgreSQL returns lowercase keys - formatted_row = { - 'personid': int(row['personid']), - 'userid': int(row['userid']), - 'name': row['name'], - 'image': row['personimg'], - 'peopledbid': int(row['peopledbid']) if row['peopledbid'] is not None else None, - 'associatedpodcasts': row['associatedpodcasts'], - } - else: - # MariaDB returns uppercase keys - formatted_row = { - 'personid': int(row['PersonID']), - 'userid': int(row['UserID']), - 'name': row['Name'], - 'image': row['PersonImg'], - 'peopledbid': int(row['PeopleDBID']) if row['PeopleDBID'] is not None else None, - 'associatedpodcasts': row['AssociatedPodcasts'], - } - formatted_result.append(formatted_row) - - return formatted_result - - except Exception as e: - print(f"Error getting person subscriptions: {e}") - import traceback - print(f"Traceback: {traceback.format_exc()}") - return [] - finally: - cursor.close() - - -def backup_user(database_type, cnx, user_id): - if database_type == "postgresql": - 
cursor = cnx.cursor(row_factory=psycopg.rows.dict_row) - query_fetch_podcasts = 'SELECT PodcastName, FeedURL FROM "Podcasts" WHERE UserID = %s' - else: - cursor = cnx.cursor(dictionary=True) - query_fetch_podcasts = "SELECT PodcastName, FeedURL FROM Podcasts WHERE UserID = %s" - - cursor.execute(query_fetch_podcasts, (user_id,)) - podcasts = cursor.fetchall() - cursor.close() - - opml_content = '\n\n \n Podcast Subscriptions\n \n \n' - - if database_type == "postgresql": - for podcast in podcasts: - opml_content += f' \n' - else: - for podcast in podcasts: - opml_content += f' \n' - - opml_content += ' \n' - - return opml_content - - - -def backup_server(database_type, cnx, database_pass): - # Get database name from environment variable - db_name = os.environ.get("DB_NAME", "pinepods_database") # Default to pinepods_database if not set - db_host = os.environ.get("DB_HOST", "db") - db_port = os.environ.get("DB_PORT", "5432" if database_type == "postgresql" else "3306") - db_user = os.environ.get("DB_USER", "postgres" if database_type == "postgresql" else "root") - - print(f'pass: {database_pass}') - if database_type == "postgresql": - os.environ['PGPASSWORD'] = database_pass - cmd = [ - "pg_dump", - "-h", db_host, - "-p", db_port, - "-U", db_user, - "-d", db_name, - "-w" - ] - else: # Assuming MySQL or MariaDB - cmd = [ - "mysqldump", - "-h", db_host, - "-P", db_port, - "-u", db_user, - "--password=" + database_pass, - db_name - ] - try: - process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = process.communicate() - print("STDOUT:", stdout.decode()) - print("STDERR:", stderr.decode()) - if process.returncode != 0: - # Handle error - raise Exception(f"Backup failed with error: {stderr.decode()}") - return stdout.decode() - finally: - if database_type == "postgresql": - del os.environ['PGPASSWORD'] - - -def restore_server(cnx, database_pass, file_content): - import tempfile - - with tempfile.NamedTemporaryFile(mode='wb', 
delete=True) as tempf: - tempf.write(file_content) - tempf.flush() - - cmd = [ - "mysql", - "-h", os.environ.get("DB_HOST", "db"), - "-P", os.environ.get("DB_PORT", "3306"), - "-u", os.environ.get("DB_USER", "root"), - f"-p{database_pass}", - os.environ.get("DB_NAME", "pinepods_database") - ] - - process = subprocess.Popen( - cmd, - stdin=open(tempf.name, 'rb'), - stdout=subprocess.PIPE, - stderr=subprocess.PIPE - ) - - stdout, stderr = process.communicate() - if process.returncode != 0: - raise Exception(f"Restoration failed with error: {stderr.decode()}") - - return "Restoration completed successfully!" - - -def get_video_date(video_id): - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - """Get upload date for a single video""" - url = f"https://www.youtube.com/watch?v={video_id}" - headers = { - "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36" - } - - try: - # Add a small random delay to avoid rate limiting - time.sleep(random.uniform(0.5, 1.5)) - - response = requests.get(url, headers=headers) - response.raise_for_status() - - # Look for uploadDate in page content - date_pattern = r'"uploadDate":"([^"]+)"' - date_match = re.search(date_pattern, response.text) - - if date_match: - date_str = date_match.group(1) - # Convert ISO format to datetime - upload_date = datetime.datetime.fromisoformat(date_str.replace('Z', '+00:00')) - return upload_date - return None - - except Exception as e: - logger.error(f"Error fetching date for video {video_id}: {e}") - return None - -def check_and_send_notification(cnx, database_type, podcast_id, episode_title): - cursor = cnx.cursor() - try: - # First check if notifications are enabled for this podcast - if database_type == "postgresql": - query = """ - SELECT p.NotificationsEnabled, p.UserID, p.PodcastName, - uns.Platform, uns.Enabled, uns.NtfyTopic, uns.NtfyServerUrl, - uns.GotifyUrl, uns.GotifyToken - FROM "Podcasts" 
p - JOIN "UserNotificationSettings" uns ON p.UserID = uns.UserID - WHERE p.PodcastID = %s AND p.NotificationsEnabled = true AND uns.Enabled = true - """ - else: - query = """ - SELECT p.NotificationsEnabled, p.UserID, p.PodcastName, - uns.Platform, uns.Enabled, uns.NtfyTopic, uns.NtfyServerUrl, - uns.GotifyUrl, uns.GotifyToken - FROM Podcasts p - JOIN UserNotificationSettings uns ON p.UserID = uns.UserID - WHERE p.PodcastID = %s AND p.NotificationsEnabled = 1 AND uns.Enabled = 1 - """ - cursor.execute(query, (podcast_id,)) - results = cursor.fetchall() # Get all enabled notification settings - if not results: - return False - - success = False # Track if at least one notification was sent - - for result in results: - try: - if isinstance(result, dict): - platform = result['platform'] if 'platform' in result else result['Platform'] - podcast_name = result['podcastname'] if 'podcastname' in result else result['PodcastName'] - - if platform == 'ntfy': - # Try both casings for each field - ntfy_topic = result.get('ntfytopic') or result.get('NtfyTopic') - ntfy_server = result.get('ntfyserverurl') or result.get('NtfyServerUrl') - - if ntfy_topic and ntfy_server: - if send_ntfy_notification( - topic=ntfy_topic, - server_url=ntfy_server, - title=f"New Episode: {podcast_name}", - message=f"New episode published: {episode_title}" - ): - success = True - - elif platform == 'gotify': - gotify_url = result.get('gotifyurl') or result.get('GotifyUrl') - gotify_token = result.get('gotifytoken') or result.get('GotifyToken') - - if gotify_url and gotify_token: - if send_gotify_notification( - server_url=gotify_url, - token=gotify_token, - title=f"New Episode: {podcast_name}", - message=f"New episode published: {episode_title}" - ): - success = True - else: - platform = result[3] - podcast_name = result[2] - if platform == 'ntfy': - if send_ntfy_notification( - topic=result[5], - server_url=result[6], - title=f"New Episode: {podcast_name}", - message=f"New episode published: 
{episode_title}" - ): - success = True - elif platform == 'gotify': - if send_gotify_notification( - server_url=result[7], - token=result[8], - title=f"New Episode: {podcast_name}", - message=f"New episode published: {episode_title}" - ): - success = True - except Exception as e: - logging.error(f"Error sending {platform} notification: {e}") - # Continue trying other platforms even if one fails - continue - - return success - - except Exception as e: - logging.error(f"Error checking/sending notifications: {e}") - return False - finally: - cursor.close() - -def toggle_podcast_notifications(cnx, database_type, podcast_id, user_id, enabled): - cursor = cnx.cursor() - try: - # First verify the user owns this podcast - if database_type == "postgresql": - check_query = """ - SELECT 1 FROM "Podcasts" - WHERE PodcastID = %s AND UserID = %s - """ - else: - check_query = """ - SELECT 1 FROM Podcasts - WHERE PodcastID = %s AND UserID = %s - """ - - cursor.execute(check_query, (podcast_id, user_id)) - if not cursor.fetchone(): - logging.warning(f"User {user_id} attempted to modify notifications for podcast {podcast_id} they don't own") - return False - - # Update the notification setting - if database_type == "postgresql": - update_query = """ - UPDATE "Podcasts" - SET NotificationsEnabled = %s - WHERE PodcastID = %s AND UserID = %s - """ - else: - update_query = """ - UPDATE Podcasts - SET NotificationsEnabled = %s - WHERE PodcastID = %s AND UserID = %s - """ - - cursor.execute(update_query, (enabled, podcast_id, user_id)) - cnx.commit() - return True - - except Exception as e: - logging.error(f"Error toggling podcast notifications: {e}") - cnx.rollback() - return False - finally: - cursor.close() - -def get_podcast_notification_status(cnx, database_type, podcast_id, user_id): - cursor = cnx.cursor() - try: - # Query the notification status - if database_type == "postgresql": - query = """ - SELECT NotificationsEnabled - FROM "Podcasts" - WHERE PodcastID = %s AND UserID = %s 
- """ - else: - query = """ - SELECT NotificationsEnabled - FROM Podcasts - WHERE PodcastID = %s AND UserID = %s - """ - cursor.execute(query, (podcast_id, user_id)) - result = cursor.fetchone() - if result: - if isinstance(result, dict): # PostgreSQL with RealDictCursor - # Try all possible case variations - for key in ['NotificationsEnabled', 'notificationsenabled']: - if key in result: - return bool(result[key]) - else: # MySQL or regular PostgreSQL cursor - return bool(result[0]) - return False # Default to False if no result found - except Exception as e: - logging.error(f"Error getting podcast notification status: {e}") - logging.error(f"Result content: {result}") # Add this for debugging - return False - finally: - cursor.close() - -# Functions for OIDC - -def get_oidc_provider(cnx, database_type, client_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole - FROM "OIDCProviders" - WHERE ClientID = %s AND Enabled = true - """ - else: - query = """ - SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole - FROM OIDCProviders - WHERE ClientID = %s AND Enabled = true - """ - cursor.execute(query, (client_id,)) - result = cursor.fetchone() - if result: - if isinstance(result, dict): - return ( - result['providerid'], - result['clientid'], - result['clientsecret'], - result['tokenurl'], - result['userinfourl'], - result['nameclaim'], - result['emailclaim'], - result['usernameclaim'], - result['rolesclaim'], - result['userrole'], - result['adminrole'] - ) - return result - return None - finally: - cursor.close() - -def get_user_by_email(cnx, database_type, email): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT UserID, Email, Username, Fullname, IsAdmin - FROM "Users" - WHERE 
Email = %s - """ - else: - query = """ - SELECT UserID, Email, Username, Fullname, IsAdmin - FROM Users - WHERE Email = %s - """ - cursor.execute(query, (email,)) - result = cursor.fetchone() - if result: - if isinstance(result, dict): - return ( - result['userid'], - result['email'], - result['username'], - result['fullname'], - result['isadmin'] - ) - return result - return None - finally: - cursor.close() - -def create_oidc_user(cnx, database_type, email, fullname, username): - cursor = cnx.cursor() - try: - print(f"Starting create_oidc_user for email: {email}, fullname: {fullname}, username: {username}") - - # Create a random salt using base64 (which is what Argon2 expects) - salt = base64.b64encode(secrets.token_bytes(16)).decode('utf-8') - # Create an impossible-to-match hash that's clearly marked as OIDC - # Using proper Argon2id format but with an impossible hash - hashed_password = f"$argon2id$v=19$m=65536,t=3,p=4${salt}${'X' * 43}_OIDC_ACCOUNT_NO_PASSWORD" - - print(f"Inserting new user with username: {username}, email: {email}") - # Insert user - if database_type == "postgresql": - query = """ - INSERT INTO "Users" - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, false) - RETURNING UserID - """ - else: - query = """ - INSERT INTO Users - (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (%s, %s, %s, %s, 0) - """ - cursor.execute(query, (fullname, username, email, hashed_password)) - - # Get user ID - if database_type == "postgresql": - result = cursor.fetchone() - print(f"PostgreSQL INSERT result: {result}, type: {type(result)}") - - if result is None: - print("ERROR: No result returned from INSERT RETURNING") - raise Exception("No user ID returned from database after insertion") - - # Handle different result types - if isinstance(result, tuple): - print(f"Result is tuple: {result}") - user_id = result[0] - elif isinstance(result, dict): - print(f"Result is dict: {result}") - # Note: PostgreSQL column names are lowercase 
by default - user_id = result.get('userid') - if user_id is None: - # Try other possible key variations - user_id = result.get('UserID') or result.get('userID') or result.get('user_id') - else: - print(f"Unexpected result type: {type(result)}, value: {result}") - # Try to extract user_id safely - try: - # Try accessing as a number - user_id = int(result) - except (TypeError, ValueError): - # If that fails, convert to string and raise exception - result_str = str(result) - print(f"Result as string: {result_str}") - raise Exception(f"Unable to extract user_id from result: {result_str}") - else: - user_id = cursor.lastrowid - print(f"MySQL lastrowid: {user_id}") - - print(f"Extracted user_id: {user_id}, type: {type(user_id)}") - - if not user_id: - print("ERROR: user_id is empty or zero") - raise Exception("Invalid user_id after user creation") - - # Add default user settings - print(f"Inserting default user settings for user_id: {user_id}") - settings_query = """ - INSERT INTO "UserSettings" - (UserID, Theme) - VALUES (%s, %s) - """ if database_type == "postgresql" else """ - INSERT INTO UserSettings - (UserID, Theme) - VALUES (%s, %s) - """ - cursor.execute(settings_query, (user_id, 'Nordic')) - - # Add default user stats - print(f"Inserting default user stats for user_id: {user_id}") - stats_query = """ - INSERT INTO "UserStats" - (UserID) - VALUES (%s) - """ if database_type == "postgresql" else """ - INSERT INTO UserStats - (UserID) - VALUES (%s) - """ - cursor.execute(stats_query, (user_id,)) - - print(f"Committing transaction") - cnx.commit() - print(f"User creation complete, returning user_id: {user_id}") - return user_id - except Exception as e: - print(f"Error in create_oidc_user: {str(e)}") - import traceback - print(f"Traceback: {traceback.format_exc()}") - cnx.rollback() - raise - finally: - cursor.close() - -def get_user_startpage(cnx, database_type, user_id): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - SELECT 
StartPage - FROM "UserSettings" - WHERE UserID = %s - """ - else: - query = """ - SELECT StartPage - FROM UserSettings - WHERE UserID = %s - """ - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - # Return 'home' as default if no setting is found - if result: - return result[0] if isinstance(result, tuple) else result['startpage'] - return 'home' - - except Exception as e: - raise - finally: - cursor.close() - -def set_user_startpage(cnx, database_type, user_id, startpage): - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = """ - UPDATE "UserSettings" - SET StartPage = %s - WHERE UserID = %s - """ - else: - query = """ - UPDATE UserSettings - SET StartPage = %s - WHERE UserID = %s - """ - - cursor.execute(query, (startpage, user_id)) - cnx.commit() - return True - - except Exception as e: - cnx.rollback() - raise - finally: - cursor.close() - - -def convert_booleans(data): - boolean_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube', 'explicit', 'is_system_playlist', 'include_unplayed', 'include_partially_played', 'include_played'] - - if isinstance(data, dict): - for key, value in data.items(): - if key in boolean_fields and value is not None: - # Convert 0/1 to False/True for known boolean fields - data[key] = bool(value) - elif isinstance(value, (dict, list)): - # Recursively process nested dictionaries and lists - data[key] = convert_booleans(value) - elif isinstance(data, list): - # Process each item in the list - for i, item in enumerate(data): - data[i] = convert_booleans(item) - - return data - -def get_home_overview(database_type, cnx, user_id): - if database_type == "postgresql": - cnx.row_factory = dict_row - cursor = cnx.cursor() - else: - cursor = cnx.cursor(dictionary=True) - - home_data = { - "recent_episodes": [], - "in_progress_episodes": [], - "top_podcasts": [], - "saved_count": 0, - "downloaded_count": 0, - "queue_count": 0 - } - - # Recent Episodes query with is_youtube field - if 
database_type == "postgresql": - recent_query = """ - SELECT - "Episodes".EpisodeID, - "Episodes".EpisodeTitle, - "Episodes".EpisodePubDate, - "Episodes".EpisodeDescription, - "Episodes".EpisodeArtwork, - "Episodes".EpisodeURL, - "Episodes".EpisodeDuration, - "Episodes".Completed, - "Podcasts".PodcastName, - "Podcasts".PodcastID, - "Podcasts".IsYouTubeChannel as is_youtube, - "UserEpisodeHistory".ListenDuration, - CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, - CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded - FROM "Episodes" - INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - LEFT JOIN "UserEpisodeHistory" ON - "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID - AND "UserEpisodeHistory".UserID = %s - LEFT JOIN "SavedEpisodes" ON - "Episodes".EpisodeID = "SavedEpisodes".EpisodeID - AND "SavedEpisodes".UserID = %s - LEFT JOIN "EpisodeQueue" ON - "Episodes".EpisodeID = "EpisodeQueue".EpisodeID - AND "EpisodeQueue".UserID = %s - LEFT JOIN "DownloadedEpisodes" ON - "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID - AND "DownloadedEpisodes".UserID = %s - WHERE "Podcasts".UserID = %s - AND "Episodes".EpisodePubDate >= NOW() - INTERVAL '7 days' - ORDER BY "Episodes".EpisodePubDate DESC - LIMIT 10 - """ - else: # MySQL or MariaDB - recent_query = """ - SELECT - Episodes.EpisodeID, - Episodes.EpisodeTitle, - Episodes.EpisodePubDate, - Episodes.EpisodeDescription, - Episodes.EpisodeArtwork, - Episodes.EpisodeURL, - Episodes.EpisodeDuration, - Episodes.Completed, - Podcasts.PodcastName, - Podcasts.PodcastID, - Podcasts.IsYouTubeChannel as is_youtube, - UserEpisodeHistory.ListenDuration, - CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, - CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN 
DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded - FROM Episodes - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - LEFT JOIN UserEpisodeHistory ON - Episodes.EpisodeID = UserEpisodeHistory.EpisodeID - AND UserEpisodeHistory.UserID = %s - LEFT JOIN SavedEpisodes ON - Episodes.EpisodeID = SavedEpisodes.EpisodeID - AND SavedEpisodes.UserID = %s - LEFT JOIN EpisodeQueue ON - Episodes.EpisodeID = EpisodeQueue.EpisodeID - AND EpisodeQueue.UserID = %s - LEFT JOIN DownloadedEpisodes ON - Episodes.EpisodeID = DownloadedEpisodes.EpisodeID - AND DownloadedEpisodes.UserID = %s - WHERE Podcasts.UserID = %s - AND Episodes.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 7 DAY) - ORDER BY Episodes.EpisodePubDate DESC - LIMIT 10 - """ - - # In Progress Episodes query with is_youtube field - in_progress_query = """ - SELECT - "Episodes".*, - "Podcasts".PodcastName, - "Podcasts".IsYouTubeChannel as is_youtube, - "UserEpisodeHistory".ListenDuration, - CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, - CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded - FROM "UserEpisodeHistory" - JOIN "Episodes" ON "UserEpisodeHistory".EpisodeID = "Episodes".EpisodeID - JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - LEFT JOIN "SavedEpisodes" ON - "Episodes".EpisodeID = "SavedEpisodes".EpisodeID - AND "SavedEpisodes".UserID = %s - LEFT JOIN "EpisodeQueue" ON - "Episodes".EpisodeID = "EpisodeQueue".EpisodeID - AND "EpisodeQueue".UserID = %s - LEFT JOIN "DownloadedEpisodes" ON - "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID - AND "DownloadedEpisodes".UserID = %s - WHERE "UserEpisodeHistory".UserID = %s - AND "UserEpisodeHistory".ListenDuration > 0 - AND "Episodes".Completed = FALSE - ORDER BY "UserEpisodeHistory".ListenDate DESC - LIMIT 10 - """ if database_type == "postgresql" 
else """ - SELECT - Episodes.*, - Podcasts.PodcastName, - Podcasts.IsYouTubeChannel as is_youtube, - UserEpisodeHistory.ListenDuration, - CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, - CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded - FROM UserEpisodeHistory - JOIN Episodes ON UserEpisodeHistory.EpisodeID = Episodes.EpisodeID - JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - LEFT JOIN SavedEpisodes ON - Episodes.EpisodeID = SavedEpisodes.EpisodeID - AND SavedEpisodes.UserID = %s - LEFT JOIN EpisodeQueue ON - Episodes.EpisodeID = EpisodeQueue.EpisodeID - AND EpisodeQueue.UserID = %s - LEFT JOIN DownloadedEpisodes ON - Episodes.EpisodeID = DownloadedEpisodes.EpisodeID - AND DownloadedEpisodes.UserID = %s - WHERE UserEpisodeHistory.UserID = %s - AND UserEpisodeHistory.ListenDuration > 0 - AND Episodes.Completed = FALSE - ORDER BY UserEpisodeHistory.ListenDate DESC - LIMIT 10 - """ - - # Top Podcasts query with all needed fields - top_podcasts_query = """ - SELECT - "Podcasts".PodcastID, - "Podcasts".PodcastName, - "Podcasts".PodcastIndexID, - "Podcasts".ArtworkURL, - "Podcasts".Author, - "Podcasts".Categories, - "Podcasts".Description, - "Podcasts".EpisodeCount, - "Podcasts".FeedURL, - "Podcasts".WebsiteURL, - "Podcasts".Explicit, - "Podcasts".IsYouTubeChannel as is_youtube, - COUNT(DISTINCT "UserEpisodeHistory".EpisodeID) as play_count, - SUM("UserEpisodeHistory".ListenDuration) as total_listen_time - FROM "Podcasts" - LEFT JOIN "Episodes" ON "Podcasts".PodcastID = "Episodes".PodcastID - LEFT JOIN "UserEpisodeHistory" ON "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID - WHERE "Podcasts".UserID = %s - GROUP BY "Podcasts".PodcastID - ORDER BY total_listen_time DESC NULLS LAST - LIMIT 6 - """ if database_type == "postgresql" else """ - SELECT - Podcasts.PodcastID, - Podcasts.PodcastName, - 
Podcasts.PodcastIndexID, - Podcasts.ArtworkURL, - Podcasts.Author, - Podcasts.Categories, - Podcasts.Description, - Podcasts.EpisodeCount, - Podcasts.FeedURL, - Podcasts.WebsiteURL, - Podcasts.Explicit, - Podcasts.IsYouTubeChannel as is_youtube, - COUNT(DISTINCT UserEpisodeHistory.EpisodeID) as play_count, - SUM(UserEpisodeHistory.ListenDuration) as total_listen_time - FROM Podcasts - LEFT JOIN Episodes ON Podcasts.PodcastID = Episodes.PodcastID - LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID - WHERE Podcasts.UserID = %s - GROUP BY Podcasts.PodcastID - ORDER BY total_listen_time DESC - LIMIT 5 - """ - - try: - # Get recent episodes - need to pass 5 parameters as we have 5 placeholders - cursor.execute(recent_query, (user_id, user_id, user_id, user_id, user_id)) - recent_results = cursor.fetchall() - if recent_results is not None: - home_data["recent_episodes"] = lowercase_keys(recent_results) - - # Get in progress episodes - need to pass 4 parameters as we have 4 placeholders - cursor.execute(in_progress_query, (user_id, user_id, user_id, user_id)) - in_progress_results = cursor.fetchall() - if in_progress_results is not None: - home_data["in_progress_episodes"] = lowercase_keys(in_progress_results) - - # Get top podcasts - cursor.execute(top_podcasts_query, (user_id,)) - top_podcasts_results = cursor.fetchall() - if top_podcasts_results is not None: - home_data["top_podcasts"] = lowercase_keys(top_podcasts_results) - - # Get counts - if database_type == "postgresql": - for table, key in [ - ("SavedEpisodes", "saved_count"), - ("DownloadedEpisodes", "downloaded_count"), - ("EpisodeQueue", "queue_count") - ]: - count_query = f'SELECT COUNT(*) FROM "{table}" WHERE userid = %s' - cursor.execute(count_query, (user_id,)) - count_result = cursor.fetchone() - if count_result is not None: - home_data[key] = count_result[0] if isinstance(count_result, tuple) else count_result.get('count', 0) - - except Exception as e: - print(f"Error 
fetching home overview: {e}") - print(f"Error type: {type(e)}") - import traceback - traceback.print_exc() - return None - finally: - cursor.close() - - if database_type != "postgresql": - home_data = convert_booleans(home_data) - - return lowercase_keys(home_data) - -def get_playback_speed(cnx, database_type: str, user_id: int, is_youtube: bool, podcast_id: Optional[int] = None) -> float: - cursor = cnx.cursor() - if database_type == "postgresql": - if podcast_id is None: - query = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s' - else: - query = 'SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = %s' - else: - if podcast_id is None: - query = 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s' - else: - query = 'SELECT PlaybackSpeed FROM Podcasts WHERE PodcastID = %s' - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - - if result: - # Handle both tuple and dictionary return types with case variations - if isinstance(result, dict): - # Try both cases for PostgreSQL vs MySQL/MariaDB - return result.get('PlaybackSpeed') or result.get('playbackspeed') or 1.0 - else: - return result[0] if result[0] is not None else 1.0 - return 1.0 diff --git a/database_functions/gpodder_router.py b/database_functions/gpodder_router.py deleted file mode 100644 index d813b720..00000000 --- a/database_functions/gpodder_router.py +++ /dev/null @@ -1,669 +0,0 @@ -from fastapi import APIRouter, Depends, HTTPException, status, Request, Response -from pydantic import BaseModel -from typing import List, Dict, Optional, Any -import sys -import base64 - -# Internal Modules -sys.path.append('/pinepods') - -import database_functions.functions -from database_functions.db_client import get_database_connection, database_type - -# Create models for the API -class DeviceCreate(BaseModel): - user_id: int - device_name: str - device_type: Optional[str] = "desktop" - device_caption: Optional[str] = None - -class Device(BaseModel): - id: int - name: str - type: 
str - caption: Optional[str] = None - last_sync: Optional[str] = None - is_active: bool = True - is_remote: Optional[bool] = False - is_default: Optional[bool] = False - -class SyncRequest(BaseModel): - user_id: int - device_id: Optional[int] = None - device_name: Optional[str] = None - is_remote: bool = False - -class ApiResponse(BaseModel): - success: bool - message: str - data: Optional[Any] = None - -# Create the router -gpodder_router = APIRouter(prefix="/api/gpodder", tags=["gpodder"]) - -# Authentication function (assumed to be defined elsewhere) -async def get_api_key_from_header(request: Request): - api_key = request.headers.get("Api-Key") - if not api_key: - raise HTTPException(status_code=403, detail="API key is required") - return api_key - -async def has_elevated_access(api_key: str, cnx): - user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - is_admin = database_functions.functions.user_admin_check(cnx, database_type, user_id) - return is_admin - -@gpodder_router.get("/devices/{user_id}", response_model=List[Device]) -async def get_user_devices_endpoint( - user_id: int, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Get all GPodder devices for a user (both local and remote)""" - import logging - import requests - from requests.auth import HTTPBasicAuth - - logger = logging.getLogger(__name__) - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if the user has permission - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if user_id != user_id_from_api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You are not 
authorized to access these devices" - ) - - # Get local devices with our updated function that handles datetime conversion - local_devices = database_functions.functions.get_user_devices(cnx, database_type, user_id) - - # Create a default device if no local devices exist - if not local_devices: - default_device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id) - if default_device_id: - local_devices = database_functions.functions.get_user_devices(cnx, database_type, user_id) - - # Get GPodder settings to fetch remote devices - cursor = cnx.cursor() - try: - if database_type == "postgresql": - query = 'SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM "Users" WHERE UserID = %s' - else: - query = "SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM Users WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - - if not result: - logger.warning(f"User {user_id} not found or has no GPodder settings") - return local_devices - - if isinstance(result, dict): - gpodder_url = result["gpodderurl"] - gpodder_login = result["gpodderloginname"] - encrypted_token = result["gpoddertoken"] - else: - gpodder_url = result[0] - gpodder_login = result[1] - encrypted_token = result[2] - - # If no GPodder settings, return only local devices - if not gpodder_url or not gpodder_login: - logger.warning(f"User {user_id} has no GPodder settings") - return local_devices - - # Decrypt the token - from cryptography.fernet import Fernet - encryption_key = database_functions.functions.get_encryption_key(cnx, database_type) - encryption_key_bytes = base64.b64decode(encryption_key) - cipher_suite = Fernet(encryption_key_bytes) - - if encrypted_token: - decrypted_token_bytes = cipher_suite.decrypt(encrypted_token.encode()) - gpodder_token = decrypted_token_bytes.decode() - else: - gpodder_token = None - - # Create auth for requests - auth = HTTPBasicAuth(gpodder_login, gpodder_token) - - # Try to fetch remote devices - 
session = requests.Session() - - # First login to establish session - login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json" - logger.info(f"Logging in to fetch devices: {login_url}") - - login_response = session.post(login_url, auth=auth) - login_response.raise_for_status() - - # Fetch devices from server - devices_url = f"{gpodder_url}/api/2/devices/{gpodder_login}.json" - logger.info(f"Fetching devices from: {devices_url}") - - devices_response = session.get(devices_url, auth=auth) - - if devices_response.status_code == 200: - try: - # Parse remote devices - remote_devices = devices_response.json() - logger.info(f"Found {len(remote_devices)} remote devices") - - # Create a map of local devices by name for quick lookup - local_devices_by_name = {device["name"]: device for device in local_devices} - - # Process remote devices - for remote_device in remote_devices: - # Extract device information - remote_name = remote_device.get("id", "") - - # Skip if we already have this device locally - if remote_name in local_devices_by_name: - continue - - # Convert to our format - device_info = { - "id": -1, # Use -1 to indicate it's a remote device not in our DB yet - "name": remote_name, - "type": remote_device.get("type", "unknown"), - "caption": remote_device.get("caption", None), - "last_sync": None, # We don't have this info - "is_active": True, - "is_remote": True # Flag to indicate it's a remote device - } - - # Add to our list - local_devices.append(device_info) - - logger.info(f"Returning {len(local_devices)} total devices") - return local_devices - - except Exception as e: - logger.error(f"Error parsing remote devices: {e}") - # Return only local devices on error - return local_devices - else: - logger.warning(f"Failed to fetch remote devices: {devices_response.status_code}") - # Return only local devices on error - return local_devices - - except Exception as e: - logger.error(f"Error fetching devices: {e}") - return local_devices - finally: - 
cursor.close() - -@gpodder_router.get("/default_device", response_model=Device) -async def get_default_device_endpoint_new( - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Get the default GPodder device for the user""" - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - # Get user ID from API key - user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - # Get the default device - device = database_functions.functions.get_default_gpodder_device(cnx, database_type, user_id) - if device: - return Device( - id=device["id"], - name=device["name"], - type=device["type"], - caption=device["caption"], - last_sync=device["last_sync"].isoformat() if device["last_sync"] else None, - is_active=device["is_active"], - is_remote=device["is_remote"], - is_default=device["is_default"] - ) - else: - raise HTTPException( - status_code=404, - detail="No default GPodder device found" - ) - -@gpodder_router.post("/set_default/{device_id}", response_model=ApiResponse) -async def set_default_device_endpoint_new( - device_id: int, - device_name: Optional[str] = None, - is_remote: bool = False, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Set a GPodder device as the default for the user""" - import logging - logger = logging.getLogger(__name__) - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Get user ID from API key - user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - - # Log information for debugging - logger.info(f"Setting default device with 
ID: {device_id}, name: {device_name}, is_remote: {is_remote}") - - # Handle remote devices (negative IDs) - if device_id < 0: - if not device_name: - # For remote devices, we need the device name - raise HTTPException( - status_code=400, - detail="Device name is required for remote devices" - ) - - # Use the dedicated function to handle remote devices - success, message, _ = database_functions.functions.handle_remote_device( - cnx, database_type, user_id, device_name - ) - - if not success: - raise HTTPException( - status_code=500, - detail=message - ) - - return ApiResponse( - success=True, - message="Default GPodder device set successfully" - ) - else: - # For local devices, proceed normally - success = database_functions.functions.set_default_gpodder_device(cnx, database_type, user_id, device_id) - - if success: - return ApiResponse( - success=True, - message="Default GPodder device set successfully" - ) - else: - raise HTTPException( - status_code=400, - detail="Failed to set default GPodder device" - ) - -@gpodder_router.post("/devices", response_model=Device) -async def create_device( - device: DeviceCreate, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Create a new GPodder device for a user""" - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if the user has permission - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if device.user_id != user_id_from_api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to create devices for this user" - ) - - # Create device - device_id = 
database_functions.functions.create_or_update_device( - cnx, - database_type, - device.user_id, - device.device_name, - device.device_type, - device.device_caption - ) - - if not device_id: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Failed to create device" - ) - - # Get the created device - devices = database_functions.functions.get_user_devices(cnx, database_type, device.user_id) - for d in devices: - if d["id"] == device_id: - return d - - # This should not happen - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Device created but not found" - ) - -@gpodder_router.post("/sync/force", response_model=ApiResponse) -async def force_full_sync( - sync_request: SyncRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Force a full sync of all local podcasts to the GPodder server""" - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if the user has permission - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if sync_request.user_id != user_id_from_api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to force sync for this user" - ) - - # Get GPodder settings - user_id = sync_request.user_id - gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id) - - if not gpodder_settings or not gpodder_settings.get("gpodderurl"): - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="GPodder settings not configured for this user" - ) - - # Get login name - cursor = cnx.cursor() - if database_type == 
"postgresql": - query = 'SELECT GpodderLoginName FROM "Users" WHERE UserID = %s' - else: - query = "SELECT GpodderLoginName FROM Users WHERE UserID = %s" - - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - - if not result: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="User not found" - ) - - gpodder_login = result[0] if isinstance(result, tuple) else result["gpodderloginname"] - - # Force sync - success = database_functions.functions.force_full_sync_to_gpodder( - database_type, - cnx, - user_id, - gpodder_settings.get("gpodderurl"), - gpodder_settings.get("gpoddertoken"), - gpodder_login, - sync_request.device_id, # Pass device_id from request - sync_request.device_name, # Pass device_name from request - sync_request.is_remote # Pass is_remote from request - ) - - if not success: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Failed to force synchronization" - ) - - return ApiResponse( - success=True, - message="Successfully synchronized all podcasts to GPodder" - ) - -@gpodder_router.post("/sync", response_model=ApiResponse) -async def sync_with_gpodder( - sync_request: SyncRequest, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Sync podcasts from GPodder to local database""" - print('running a sync') - user_id = sync_request.user_id - # Get user information - cursor = cnx.cursor() - if database_type == "postgresql": - query = ''' - SELECT GpodderLoginName, Pod_Sync_Type, GpodderUrl, GpodderToken, Username - FROM "Users" - WHERE UserID = %s - ''' - else: - query = ''' - SELECT GpodderLoginName, Pod_Sync_Type, GpodderUrl, GpodderToken, Username - FROM Users - WHERE UserID = %s - ''' - cursor.execute(query, (user_id,)) - result = cursor.fetchone() - cursor.close() - if not result: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="User not found" - ) - print('grabbing user data') - # 
Extract user data - if isinstance(result, tuple): - gpodder_login = result[0] - pod_sync_type = result[1] - gpodder_url = result[2] - gpodder_token = result[3] - username = result[4] - else: - gpodder_login = result["gpodderloginname"] - pod_sync_type = result["pod_sync_type"] - gpodder_url = result["gpodderurl"] - gpodder_token = result["gpoddertoken"] - username = result["username"] - # Check if GPodder sync is enabled - if pod_sync_type not in ["gpodder", "both", "external"]: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="GPodder sync not enabled for this user" - ) - # Check if this is internal sync (local gpodder API) - is_internal = gpodder_url == "http://localhost:8042" - print(f"Syncing with device_id: {sync_request.device_id}, device_name: {sync_request.device_name}, " - f"is_remote: {sync_request.is_remote}, is_internal: {is_internal}") - # For external sync, use the function with the appropriate parameters - print(f"Using external sync method for user {user_id} with URL {gpodder_url}") - # Get encryption key for token handling - cursor = cnx.cursor() - if database_type == "postgresql": - query = 'SELECT EncryptionKey FROM "AppSettings" WHERE AppSettingsID = 1' - else: - query = "SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1" - cursor.execute(query) - encryption_key_result = cursor.fetchone() - cursor.close() - encryption_key = None - if encryption_key_result: - if isinstance(encryption_key_result, tuple): - encryption_key = encryption_key_result[0] - else: - encryption_key = encryption_key_result["encryptionkey"] - print('Now doing refresh') - success = database_functions.functions.refresh_gpodder_subscription( - database_type, - cnx, - user_id, - gpodder_url, - gpodder_token, - gpodder_login, - pod_sync_type, - sync_request.device_id, - sync_request.device_name, - sync_request.is_remote - ) - print('refresh done') - if not success: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - 
detail="Failed to synchronize with GPodder" - ) - return ApiResponse( - success=True, - message="Successfully synchronized with GPodder" - ) - -@gpodder_router.get("/test-connection", response_model=ApiResponse) -async def test_gpodder_connection( - user_id: int, - gpodder_url: str, - gpodder_username: str, - gpodder_password: str, - cnx=Depends(get_database_connection), - api_key: str = Depends(get_api_key_from_header) -): - """Test connection to GPodder server""" - import requests - from requests.auth import HTTPBasicAuth - import logging - - logger = logging.getLogger(__name__) - - is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key) - if not is_valid_key: - raise HTTPException( - status_code=403, - detail="Your API key is either invalid or does not have correct permission" - ) - - # Check if the user has permission - elevated_access = await has_elevated_access(api_key, cnx) - if not elevated_access: - user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key) - if user_id != user_id_from_api_key: - raise HTTPException( - status_code=status.HTTP_403_FORBIDDEN, - detail="You are not authorized to test connection for this user" - ) - - try: - # Create a session and save cookies - session = requests.Session() - auth = HTTPBasicAuth(gpodder_username, gpodder_password) - - # Step 1: Login - login_url = f"{gpodder_url}/api/2/auth/{gpodder_username}/login.json" - logger.info(f"Testing login at: {login_url}") - - login_response = session.post(login_url, auth=auth) - if login_response.status_code != 200: - logger.error(f"Login failed: {login_response.status_code} - {login_response.text}") - return ApiResponse( - success=False, - message=f"Failed to login to GPodder server: {login_response.status_code} {login_response.reason}", - data=None - ) - - logger.info(f"Login successful: {login_response.status_code}") - logger.info(f"Cookies after login: {session.cookies.get_dict()}") - - # Try multiple 
approaches to verify subscription access - - # 1. First try to get devices (no device parameter needed) - logger.info("Attempting to get list of devices...") - devices_url = f"{gpodder_url}/api/2/devices/{gpodder_username}.json" - devices_response = session.get(devices_url) - - if devices_response.status_code == 200: - logger.info(f"Devices fetch successful: {devices_response.status_code}") - - try: - devices_data = devices_response.json() - logger.info(f"Found {len(devices_data)} devices") - - # If devices exist, try to use the first one - if devices_data and len(devices_data) > 0: - device_id = devices_data[0].get('id', 'default') - logger.info(f"Using existing device: {device_id}") - - # Try to get subscriptions with this device - device_subs_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}/{device_id}.json?since=0" - device_subs_response = session.get(device_subs_url) - - if device_subs_response.status_code == 200: - return ApiResponse( - success=True, - message="Successfully connected to GPodder server and verified access using existing device.", - data={ - "auth_type": "session", - "device_id": device_id, - "has_devices": True - } - ) - except Exception as device_err: - logger.warning(f"Error parsing devices: {str(device_err)}") - - # 2. Try with "default" device name - device_name = "default" - subscriptions_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}/{device_name}.json?since=0" - logger.info(f"Checking subscriptions with default device: {subscriptions_url}") - - subscriptions_response = session.get(subscriptions_url) - if subscriptions_response.status_code == 200: - logger.info(f"Subscriptions check successful with default device: {subscriptions_response.status_code}") - - return ApiResponse( - success=True, - message="Successfully connected to GPodder server and verified access with default device.", - data={ - "auth_type": "session", - "device_name": device_name - } - ) - - # 3. 
As a last resort, try without device name - simple_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}.json" - logger.info(f"Checking subscriptions without device: {simple_url}") - - simple_response = session.get(simple_url) - if simple_response.status_code == 200: - logger.info(f"Subscriptions check successful without device: {simple_response.status_code}") - - return ApiResponse( - success=True, - message="Successfully connected to GPodder server and verified access. No device required.", - data={ - "auth_type": "session", - "device_required": False - } - ) - - # If we got here, login worked but subscription access didn't - logger.warning("Login successful but couldn't access subscriptions with any method") - return ApiResponse( - success=True, - message="Connected to GPodder server but couldn't verify subscription access. Login credentials are valid.", - data={ - "auth_type": "session", - "warning": "Could not verify subscription access" - } - ) - - except Exception as e: - logger.error(f"Connection test failed: {str(e)}") - return ApiResponse( - success=False, - message=f"Failed to connect to GPodder server: {str(e)}", - data=None - ) diff --git a/database_functions/import_progress.py b/database_functions/import_progress.py deleted file mode 100644 index c6d9870a..00000000 --- a/database_functions/import_progress.py +++ /dev/null @@ -1,35 +0,0 @@ -import json -from typing import Tuple -from database_functions.valkey_client import valkey_client - -class ImportProgressManager: - def start_import(self, user_id: int, total_podcasts: int): - valkey_client.set(f"import_progress:{user_id}", json.dumps({ - "current": 0, - "total": total_podcasts, - "current_podcast": "" - })) - - def update_progress(self, user_id: int, current: int, current_podcast: str): - progress_json = valkey_client.get(f"import_progress:{user_id}") - if progress_json: - progress = json.loads(progress_json) - progress.update({ - "current": current, - "current_podcast": current_podcast - }) 
- valkey_client.set(f"import_progress:{user_id}", json.dumps(progress)) - - def get_progress(self, user_id: int) -> Tuple[int, int, str]: - progress_json = valkey_client.get(f"import_progress:{user_id}") - if progress_json: - progress = json.loads(progress_json) - return (progress.get("current", 0), - progress.get("total", 0), - progress.get("current_podcast", "")) - return (0, 0, "") - - def clear_progress(self, user_id: int): - valkey_client.delete(f"import_progress:{user_id}") - -import_progress_manager = ImportProgressManager() diff --git a/database_functions/migration_definitions.py b/database_functions/migration_definitions.py index 4b08e6ef..a9fa4584 100644 --- a/database_functions/migration_definitions.py +++ b/database_functions/migration_definitions.py @@ -1958,12 +1958,2130 @@ def migration_018_gpodder_sync_timestamp(conn, db_type: str): cursor.close() +@register_migration("019", "fix_encryption_key_storage", "Convert EncryptionKey from binary to text format for consistency", requires=["001"]) +def migration_019_fix_encryption_key_storage(conn, db_type: str): + """Convert EncryptionKey storage from binary to text format""" + cursor = conn.cursor() + + try: + if db_type == "postgresql": + # First, get the current encryption key value as bytes + cursor.execute('SELECT encryptionkey FROM "AppSettings" WHERE appsettingsid = 1') + result = cursor.fetchone() + + if result and result[0]: + # Convert bytes to string + key_bytes = result[0] + if isinstance(key_bytes, bytes): + key_string = key_bytes.decode('utf-8') + else: + key_string = str(key_bytes) + + # Drop and recreate column as TEXT + cursor.execute('ALTER TABLE "AppSettings" DROP COLUMN encryptionkey') + cursor.execute('ALTER TABLE "AppSettings" ADD COLUMN encryptionkey TEXT') + + # Insert the key back as text + cursor.execute('UPDATE "AppSettings" SET encryptionkey = %s WHERE appsettingsid = 1', (key_string,)) + logger.info("Converted PostgreSQL encryptionkey from BYTEA to TEXT") + else: + # No existing 
key, just change the column type + cursor.execute('ALTER TABLE "AppSettings" DROP COLUMN encryptionkey') + cursor.execute('ALTER TABLE "AppSettings" ADD COLUMN encryptionkey TEXT') + logger.info("Changed PostgreSQL encryptionkey column to TEXT (no existing data)") + + else: # MySQL + # First, get the current encryption key value + cursor.execute('SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1') + result = cursor.fetchone() + + if result and result[0]: + # Convert binary to string + key_data = result[0] + if isinstance(key_data, bytes): + # Remove null padding and decode + key_string = key_data.rstrip(b'\x00').decode('utf-8') + else: + key_string = str(key_data) + + # Change column type and update value + cursor.execute('ALTER TABLE AppSettings MODIFY EncryptionKey VARCHAR(255)') + cursor.execute('UPDATE AppSettings SET EncryptionKey = %s WHERE AppSettingsID = 1', (key_string,)) + logger.info("Converted MySQL EncryptionKey from BINARY to VARCHAR") + else: + # No existing key, just change the column type + cursor.execute('ALTER TABLE AppSettings MODIFY EncryptionKey VARCHAR(255)') + logger.info("Changed MySQL EncryptionKey column to VARCHAR (no existing data)") + + logger.info("Encryption key storage migration completed successfully") + + except Exception as e: + logger.error(f"Error in encryption key migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("020", "add_default_gpodder_device", "Add DefaultGpodderDevice column to Users table for tracking user's selected GPodder device", requires=["001"]) +def migration_020_add_default_gpodder_device(conn, db_type: str): + """Add DefaultGpodderDevice column to Users table""" + cursor = conn.cursor() + + try: + if db_type == "postgresql": + # Add defaultgpodderdevice column to Users table + safe_execute_sql(cursor, 'ALTER TABLE "Users" ADD COLUMN defaultgpodderdevice VARCHAR(255)') + logger.info("Added defaultgpodderdevice column to Users table (PostgreSQL)") + + else: # MySQL + # Add 
DefaultGpodderDevice column to Users table + safe_execute_sql(cursor, 'ALTER TABLE Users ADD COLUMN DefaultGpodderDevice VARCHAR(255)') + logger.info("Added DefaultGpodderDevice column to Users table (MySQL)") + + logger.info("Default GPodder device column migration completed successfully") + + except Exception as e: + logger.error(f"Error in default GPodder device migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("021", "limit_system_playlists_episodes", "Add MaxEpisodes limit to high-volume system playlists", requires=["010"]) +def migration_021_limit_system_playlists_episodes(conn, db_type: str): + """Add MaxEpisodes limit to Commuter Mix, Longform, and Weekend Marathon system playlists""" + cursor = conn.cursor() + + try: + logger.info("Starting system playlist episodes limit migration") + + # Define the playlists to update with 1000 episode limit + playlists_to_update = ['Commuter Mix', 'Longform', 'Weekend Marathon'] + + if db_type == "postgresql": + for playlist_name in playlists_to_update: + safe_execute_sql(cursor, ''' + UPDATE "Playlists" + SET maxepisodes = 1000 + WHERE name = %s AND issystemplaylist = TRUE + ''', (playlist_name,)) + logger.info(f"Updated {playlist_name} system playlist with maxepisodes=1000 (PostgreSQL)") + + else: # MySQL + for playlist_name in playlists_to_update: + safe_execute_sql(cursor, ''' + UPDATE Playlists + SET MaxEpisodes = 1000 + WHERE Name = %s AND IsSystemPlaylist = TRUE + ''', (playlist_name,)) + logger.info(f"Updated {playlist_name} system playlist with MaxEpisodes=1000 (MySQL)") + + logger.info("System playlist episodes limit migration completed successfully") + + except Exception as e: + logger.error(f"Error in system playlist episodes limit migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("022", "expand_downloaded_location_column", "Expand DownloadedLocation column size to handle long file paths", requires=["007"]) +def 
migration_022_expand_downloaded_location_column(conn, db_type: str): + """Expand DownloadedLocation column size to handle long file paths""" + cursor = conn.cursor() + + try: + logger.info("Starting downloaded location column expansion migration") + + if db_type == "postgresql": + # Expand DownloadedLocation in DownloadedEpisodes table + safe_execute_sql(cursor, ''' + ALTER TABLE "DownloadedEpisodes" + ALTER COLUMN downloadedlocation TYPE TEXT + ''', conn=conn) + logger.info("Expanded downloadedlocation column in DownloadedEpisodes table (PostgreSQL)") + + # Expand DownloadedLocation in DownloadedVideos table + safe_execute_sql(cursor, ''' + ALTER TABLE "DownloadedVideos" + ALTER COLUMN downloadedlocation TYPE TEXT + ''', conn=conn) + logger.info("Expanded downloadedlocation column in DownloadedVideos table (PostgreSQL)") + + else: # MySQL + # Expand DownloadedLocation in DownloadedEpisodes table + safe_execute_sql(cursor, ''' + ALTER TABLE DownloadedEpisodes + MODIFY DownloadedLocation TEXT + ''', conn=conn) + logger.info("Expanded DownloadedLocation column in DownloadedEpisodes table (MySQL)") + + # Expand DownloadedLocation in DownloadedVideos table + safe_execute_sql(cursor, ''' + ALTER TABLE DownloadedVideos + MODIFY DownloadedLocation TEXT + ''', conn=conn) + logger.info("Expanded DownloadedLocation column in DownloadedVideos table (MySQL)") + + logger.info("Downloaded location column expansion migration completed successfully") + + except Exception as e: + logger.error(f"Error in downloaded location column expansion migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("023", "add_missing_performance_indexes", "Add missing performance indexes for queue, saved, downloaded, and history tables", requires=["006", "007"]) +def migration_023_add_missing_performance_indexes(conn, db_type: str): + """Add missing performance indexes for queue, saved, downloaded, and history tables""" + cursor = conn.cursor() + + try: + logger.info("Starting 
missing performance indexes migration") + + table_prefix = '"' if db_type == 'postgresql' else '' + table_suffix = '"' if db_type == 'postgresql' else '' + + # EpisodeQueue indexes (critical for get_queued_episodes performance) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_userid ON {table_prefix}EpisodeQueue{table_suffix}(UserID)', 'idx_episodequeue_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_episodeid ON {table_prefix}EpisodeQueue{table_suffix}(EpisodeID)', 'idx_episodequeue_episodeid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_queueposition ON {table_prefix}EpisodeQueue{table_suffix}(QueuePosition)', 'idx_episodequeue_queueposition') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_userid_queueposition ON {table_prefix}EpisodeQueue{table_suffix}(UserID, QueuePosition)', 'idx_episodequeue_userid_queueposition') + + # SavedEpisodes indexes (for return_episodes LEFT JOIN performance) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_userid ON {table_prefix}SavedEpisodes{table_suffix}(UserID)', 'idx_savedepisodes_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_episodeid ON {table_prefix}SavedEpisodes{table_suffix}(EpisodeID)', 'idx_savedepisodes_episodeid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_userid_episodeid ON {table_prefix}SavedEpisodes{table_suffix}(UserID, EpisodeID)', 'idx_savedepisodes_userid_episodeid') + + # SavedVideos indexes (for YouTube video queries) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_userid ON {table_prefix}SavedVideos{table_suffix}(UserID)', 'idx_savedvideos_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_videoid ON {table_prefix}SavedVideos{table_suffix}(VideoID)', 'idx_savedvideos_videoid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_userid_videoid ON {table_prefix}SavedVideos{table_suffix}(UserID, 
VideoID)', 'idx_savedvideos_userid_videoid') + + # DownloadedEpisodes indexes (for return_episodes LEFT JOIN performance) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_userid ON {table_prefix}DownloadedEpisodes{table_suffix}(UserID)', 'idx_downloadedepisodes_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_episodeid ON {table_prefix}DownloadedEpisodes{table_suffix}(EpisodeID)', 'idx_downloadedepisodes_episodeid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_userid_episodeid ON {table_prefix}DownloadedEpisodes{table_suffix}(UserID, EpisodeID)', 'idx_downloadedepisodes_userid_episodeid') + + # DownloadedVideos indexes (for YouTube video queries) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_userid ON {table_prefix}DownloadedVideos{table_suffix}(UserID)', 'idx_downloadedvideos_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_videoid ON {table_prefix}DownloadedVideos{table_suffix}(VideoID)', 'idx_downloadedvideos_videoid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_userid_videoid ON {table_prefix}DownloadedVideos{table_suffix}(UserID, VideoID)', 'idx_downloadedvideos_userid_videoid') + + # UserEpisodeHistory indexes (for return_episodes LEFT JOIN performance) + safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_userid ON {table_prefix}UserEpisodeHistory{table_suffix}(UserID)', 'idx_userepisodehistory_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_episodeid ON {table_prefix}UserEpisodeHistory{table_suffix}(EpisodeID)', 'idx_userepisodehistory_episodeid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_userid_episodeid ON {table_prefix}UserEpisodeHistory{table_suffix}(UserID, EpisodeID)', 'idx_userepisodehistory_userid_episodeid') + + # UserVideoHistory indexes (for YouTube video queries) + safe_add_index(cursor, db_type, f'CREATE 
INDEX idx_uservideohistory_userid ON {table_prefix}UserVideoHistory{table_suffix}(UserID)', 'idx_uservideohistory_userid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_uservideohistory_videoid ON {table_prefix}UserVideoHistory{table_suffix}(VideoID)', 'idx_uservideohistory_videoid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_uservideohistory_userid_videoid ON {table_prefix}UserVideoHistory{table_suffix}(UserID, VideoID)', 'idx_uservideohistory_userid_videoid') + + # Additional useful indexes for query performance + safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodes_completed ON {table_prefix}Episodes{table_suffix}(Completed)', 'idx_episodes_completed') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_completed ON {table_prefix}YouTubeVideos{table_suffix}(Completed)', 'idx_youtubevideos_completed') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_podcastid ON {table_prefix}YouTubeVideos{table_suffix}(PodcastID)', 'idx_youtubevideos_podcastid') + safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_publishedat ON {table_prefix}YouTubeVideos{table_suffix}(PublishedAt)', 'idx_youtubevideos_publishedat') + + logger.info("Missing performance indexes migration completed successfully") + + except Exception as e: + logger.error(f"Error in missing performance indexes migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("025", "fix_people_table_columns", "Add missing PersonImg, PeopleDBID, and AssociatedPodcasts columns to existing People tables", requires=["009"]) +def migration_025_fix_people_table_columns(conn, db_type: str): + """Add missing columns to existing People tables for users who upgraded from older versions""" + cursor = conn.cursor() + + try: + logger.info("Starting People table columns fix migration") + + if db_type == "postgresql": + # Check if PersonImg column exists, if not add it + safe_execute_sql(cursor, ''' + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 
FROM information_schema.columns + WHERE table_name = 'People' AND column_name = 'personimg' + ) THEN + ALTER TABLE "People" ADD COLUMN PersonImg TEXT; + END IF; + END $$; + ''', conn=conn) + + # Check if PeopleDBID column exists, if not add it + safe_execute_sql(cursor, ''' + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'People' AND column_name = 'peopledbid' + ) THEN + ALTER TABLE "People" ADD COLUMN PeopleDBID INT; + END IF; + END $$; + ''', conn=conn) + + # Check if AssociatedPodcasts column exists, if not add it + safe_execute_sql(cursor, ''' + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM information_schema.columns + WHERE table_name = 'People' AND column_name = 'associatedpodcasts' + ) THEN + ALTER TABLE "People" ADD COLUMN AssociatedPodcasts TEXT; + END IF; + END $$; + ''', conn=conn) + + logger.info("Added missing columns to People table (PostgreSQL)") + + else: # MySQL + # For MySQL, use IF NOT EXISTS syntax or try-catch approach + try: + safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN PersonImg TEXT', conn=conn) + logger.info("Added PersonImg column to People table (MySQL)") + except Exception: + logger.debug("PersonImg column already exists in People table (MySQL)") + + try: + safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN PeopleDBID INT', conn=conn) + logger.info("Added PeopleDBID column to People table (MySQL)") + except Exception: + logger.debug("PeopleDBID column already exists in People table (MySQL)") + + try: + safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN AssociatedPodcasts TEXT', conn=conn) + logger.info("Added AssociatedPodcasts column to People table (MySQL)") + except Exception: + logger.debug("AssociatedPodcasts column already exists in People table (MySQL)") + + logger.info("People table columns fix migration completed successfully") + + except Exception as e: + logger.error(f"Error in People table columns fix migration: {e}") + raise + finally: + cursor.close() 
+ + +@register_migration("026", "limit_quick_listens_episodes", "Add MaxEpisodes limit to Quick Listens system playlist", requires=["012"]) +def migration_026_limit_quick_listens_episodes(conn, db_type: str): + """Add MaxEpisodes limit to Quick Listens system playlist""" + cursor = conn.cursor() + + try: + logger.info("Starting Quick Listens MaxEpisodes limit migration") + + if db_type == "postgresql": + # Update Quick Listens playlist to have maxepisodes = 1000 + safe_execute_sql(cursor, ''' + UPDATE "Playlists" + SET maxepisodes = 1000 + WHERE name = 'Quick Listens' AND issystemplaylist = TRUE + ''', conn=conn) + logger.info("Updated Quick Listens system playlist maxepisodes=1000 (PostgreSQL)") + + else: # MySQL + # Update Quick Listens playlist to have MaxEpisodes = 1000 + safe_execute_sql(cursor, ''' + UPDATE Playlists + SET MaxEpisodes = 1000 + WHERE Name = 'Quick Listens' AND IsSystemPlaylist = TRUE + ''', conn=conn) + logger.info("Updated Quick Listens system playlist MaxEpisodes=1000 (MySQL)") + + logger.info("Quick Listens MaxEpisodes limit migration completed successfully") + + except Exception as e: + logger.error(f"Error in Quick Listens MaxEpisodes limit migration: {e}") + raise + finally: + cursor.close() + + def register_all_migrations(): """Register all migrations with the migration manager""" # Migrations are auto-registered via decorators logger.info("All migrations registered") +@register_migration("024", "fix_quick_listens_min_duration", "Update Quick Listens playlist to exclude 0-duration episodes", requires=["012"]) +def migration_024_fix_quick_listens_min_duration(conn, db_type: str): + """Update Quick Listens system playlist to exclude episodes with 0 duration""" + cursor = conn.cursor() + + try: + logger.info("Starting Quick Listens min duration fix migration") + + if db_type == "postgresql": + # Update Quick Listens playlist to have min_duration = 1 second + safe_execute_sql(cursor, ''' + UPDATE "Playlists" + SET minduration = 1 + WHERE 
name = 'Quick Listens' AND issystemplaylist = TRUE + ''', conn=conn) + logger.info("Updated Quick Listens system playlist minduration=1 (PostgreSQL)") + + else: # MySQL + # Update Quick Listens playlist to have MinDuration = 1 second + safe_execute_sql(cursor, ''' + UPDATE Playlists + SET MinDuration = 1 + WHERE Name = 'Quick Listens' AND IsSystemPlaylist = TRUE + ''', conn=conn) + logger.info("Updated Quick Listens system playlist MinDuration=1 (MySQL)") + + logger.info("Quick Listens min duration fix migration completed successfully") + + except Exception as e: + logger.error(f"Error in Quick Listens min duration fix migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("027", "add_scheduled_backups_table", "Create ScheduledBackups table for automated backup management", requires=["026"]) +def migration_027_add_scheduled_backups_table(conn, db_type: str): + """Create ScheduledBackups table for automated backup management""" + cursor = conn.cursor() + + try: + logger.info("Starting ScheduledBackups table creation migration") + + if db_type == "postgresql": + # Create ScheduledBackups table for PostgreSQL + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS "ScheduledBackups" ( + id SERIAL PRIMARY KEY, + userid INTEGER NOT NULL, + cron_schedule VARCHAR(50) NOT NULL, + enabled BOOLEAN NOT NULL DEFAULT false, + created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + UNIQUE(userid), + FOREIGN KEY (userid) REFERENCES "Users"(userid) ON DELETE CASCADE + ) + ''', conn=conn) + logger.info("Created ScheduledBackups table (PostgreSQL)") + + # Create index for performance + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_scheduled_backups_enabled + ON "ScheduledBackups"(enabled) + ''', conn=conn) + logger.info("Created index on enabled column (PostgreSQL)") + + else: # MySQL + # Create ScheduledBackups table for MySQL + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS 
ScheduledBackups ( + ID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + CronSchedule VARCHAR(50) NOT NULL, + Enabled BOOLEAN NOT NULL DEFAULT FALSE, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + UpdatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + UNIQUE KEY unique_user (UserID), + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE + ) + ''', conn=conn) + logger.info("Created ScheduledBackups table (MySQL)") + + # Create index for performance + safe_execute_sql(cursor, ''' + CREATE INDEX idx_scheduled_backups_enabled + ON ScheduledBackups(Enabled) + ''', conn=conn) + logger.info("Created index on Enabled column (MySQL)") + + logger.info("ScheduledBackups table creation migration completed successfully") + + except Exception as e: + logger.error(f"Error in ScheduledBackups table creation migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("028", "add_ignore_podcast_index_column", "Add IgnorePodcastIndex column to Podcasts table", requires=["027"]) +def migration_028_add_ignore_podcast_index_column(conn, db_type: str): + """ + Migration 028: Add IgnorePodcastIndex column to Podcasts table + """ + logger.info("Starting migration 028: Add IgnorePodcastIndex column to Podcasts table") + cursor = conn.cursor() + + try: + if db_type == 'postgresql': + safe_execute_sql(cursor, ''' + ALTER TABLE "Podcasts" + ADD COLUMN IF NOT EXISTS IgnorePodcastIndex BOOLEAN DEFAULT FALSE + ''', conn=conn) + logger.info("Added IgnorePodcastIndex column to Podcasts table (PostgreSQL)") + + else: # MySQL + # Check if column already exists to avoid duplicate column error + safe_execute_sql(cursor, ''' + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'Podcasts' + AND column_name = 'IgnorePodcastIndex' + AND table_schema = DATABASE() + ''', conn=conn) + + result = cursor.fetchone() + if result[0] == 0: # Column doesn't exist + safe_execute_sql(cursor, ''' + ALTER TABLE Podcasts + ADD COLUMN 
IgnorePodcastIndex TINYINT(1) DEFAULT 0 + ''', conn=conn) + logger.info("Added IgnorePodcastIndex column to Podcasts table (MySQL)") + else: + logger.info("IgnorePodcastIndex column already exists in Podcasts table (MySQL)") + + logger.info("IgnorePodcastIndex column migration completed successfully") + + except Exception as e: + logger.error(f"Error in IgnorePodcastIndex column migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("029", "fix_people_episodes_table_schema", "Fix PeopleEpisodes table schema to match expected format", requires=["009"]) +def migration_029_fix_people_episodes_table_schema(conn, db_type: str): + """ + Migration 029: Fix PeopleEpisodes table schema + + This migration ensures the PeopleEpisodes table has the correct schema with all required columns. + Some databases may have an incomplete PeopleEpisodes table from migration 009. + """ + logger.info("Starting migration 029: Fix PeopleEpisodes table schema") + cursor = conn.cursor() + + try: + if db_type == 'postgresql': + # For PostgreSQL, we'll recreate the table with the correct schema + # First check if table exists and get its current structure + safe_execute_sql(cursor, ''' + SELECT column_name + FROM information_schema.columns + WHERE table_name = 'PeopleEpisodes' + AND table_schema = current_schema() + ''', conn=conn) + + existing_columns = [row[0] for row in cursor.fetchall()] + + if 'podcastid' not in [col.lower() for col in existing_columns]: + logger.info("PeopleEpisodes table missing required columns, recreating...") + + # Drop existing table if it exists with wrong schema + safe_execute_sql(cursor, 'DROP TABLE IF EXISTS "PeopleEpisodes"', conn=conn) + + # Create with correct schema + safe_execute_sql(cursor, ''' + CREATE TABLE "PeopleEpisodes" ( + EpisodeID SERIAL PRIMARY KEY, + PersonID INT, + PodcastID INT, + EpisodeTitle TEXT, + EpisodeDescription TEXT, + EpisodeURL TEXT, + EpisodeArtwork TEXT, + EpisodePubDate TIMESTAMP, + EpisodeDuration INT, + 
AddedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (PersonID) REFERENCES "People"(PersonID), + FOREIGN KEY (PodcastID) REFERENCES "Podcasts"(PodcastID) + ) + ''', conn=conn) + logger.info("Recreated PeopleEpisodes table with correct schema (PostgreSQL)") + else: + logger.info("PeopleEpisodes table already has correct schema (PostgreSQL)") + + else: # MySQL + # For MySQL, check current table structure + safe_execute_sql(cursor, ''' + SELECT column_name + FROM information_schema.columns + WHERE table_name = 'PeopleEpisodes' + AND table_schema = DATABASE() + ''', conn=conn) + + existing_columns = [row[0] for row in cursor.fetchall()] + logger.info(f"Current PeopleEpisodes columns: {existing_columns}") + + if 'PodcastID' not in existing_columns: + logger.info("PeopleEpisodes table missing required columns, recreating...") + + # Backup any existing data first (if the table has useful data) + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS PeopleEpisodes_backup AS + SELECT * FROM PeopleEpisodes + ''', conn=conn) + logger.info("Created backup of existing PeopleEpisodes table") + + # Drop existing table + safe_execute_sql(cursor, 'DROP TABLE IF EXISTS PeopleEpisodes', conn=conn) + + # Create with correct schema + safe_execute_sql(cursor, ''' + CREATE TABLE PeopleEpisodes ( + EpisodeID INT AUTO_INCREMENT PRIMARY KEY, + PersonID INT, + PodcastID INT, + EpisodeTitle TEXT, + EpisodeDescription TEXT, + EpisodeURL TEXT, + EpisodeArtwork TEXT, + EpisodePubDate TIMESTAMP, + EpisodeDuration INT, + AddedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (PersonID) REFERENCES People(PersonID), + FOREIGN KEY (PodcastID) REFERENCES Podcasts(PodcastID) + ) + ''', conn=conn) + logger.info("Recreated PeopleEpisodes table with correct schema (MySQL)") + else: + logger.info("PeopleEpisodes table already has correct schema (MySQL)") + + logger.info("PeopleEpisodes table schema fix completed successfully") + + except Exception as e: + logger.error(f"Error in 
PeopleEpisodes table schema fix migration: {e}") + raise + finally: + cursor.close() + + +@register_migration("030", "add_user_language_preference", "Add Language column to Users table for user-specific language preferences", requires=["001"]) +def migration_030_add_user_language_preference(conn, db_type: str): + """Add Language column to Users table for user-specific language preferences""" + cursor = conn.cursor() + + try: + # Get the default language from environment variable, fallback to 'en' + default_language = os.environ.get("DEFAULT_LANGUAGE", "en") + + # Validate language code (basic validation) + if not default_language or len(default_language) > 10: + default_language = "en" + + logger.info(f"Adding Language column to Users table with default '{default_language}'") + + if db_type == 'postgresql': + # Add Language column with default from environment variable + safe_execute_sql(cursor, f''' + ALTER TABLE "Users" + ADD COLUMN IF NOT EXISTS Language VARCHAR(10) DEFAULT '{default_language}' + ''', conn=conn) + + # Add comment to document the column + safe_execute_sql(cursor, ''' + COMMENT ON COLUMN "Users".Language IS 'ISO 639-1 language code for user interface language preference' + ''', conn=conn) + + else: # mysql/mariadb + # Check if column exists first + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'Users' + AND COLUMN_NAME = 'Language' + """) + + if cursor.fetchone()[0] == 0: + safe_execute_sql(cursor, f''' + ALTER TABLE Users + ADD COLUMN Language VARCHAR(10) DEFAULT '{default_language}' + COMMENT 'ISO 639-1 language code for user interface language preference' + ''', conn=conn) + + logger.info(f"Successfully added Language column to Users table with default '{default_language}'") + + except Exception as e: + logger.error(f"Error in migration 030: {e}") + raise + finally: + cursor.close() + + +@register_migration("031", "add_oidc_env_initialized_column", "Add 
InitializedFromEnv column to OIDCProviders table to track env-initialized providers", requires=["001"]) +def migration_031_add_oidc_env_initialized_column(conn, db_type: str): + """Add InitializedFromEnv column to OIDCProviders table to track providers created from environment variables""" + cursor = conn.cursor() + + try: + logger.info("Adding InitializedFromEnv column to OIDCProviders table") + + if db_type == 'postgresql': + # Add InitializedFromEnv column (defaults to false for existing providers) + safe_execute_sql(cursor, ''' + ALTER TABLE "OIDCProviders" + ADD COLUMN IF NOT EXISTS InitializedFromEnv BOOLEAN DEFAULT false + ''', conn=conn) + + # Add comment to document the column + safe_execute_sql(cursor, ''' + COMMENT ON COLUMN "OIDCProviders".InitializedFromEnv IS 'Indicates if this provider was created from environment variables and should not be removable via UI' + ''', conn=conn) + + else: # mysql/mariadb + # Check if column exists first + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'OIDCProviders' + AND COLUMN_NAME = 'InitializedFromEnv' + """) + + if cursor.fetchone()[0] == 0: + safe_execute_sql(cursor, ''' + ALTER TABLE OIDCProviders + ADD COLUMN InitializedFromEnv TINYINT(1) DEFAULT 0 + COMMENT 'Indicates if this provider was created from environment variables and should not be removable via UI' + ''', conn=conn) + + logger.info("Successfully added InitializedFromEnv column to OIDCProviders table") + except Exception as e: + logger.error(f"Error in migration 031: {e}") + raise + finally: + cursor.close() + + +@register_migration("032", "create_user_default_playlists", "Create default playlists for all existing users", requires=["012"]) +def migration_032_create_user_default_playlists(conn, db_type: str): + """Create default playlists for all existing users, eliminating system playlists""" + cursor = conn.cursor() + + try: + logger.info("Starting user default playlists 
migration") + + # First, add the episode_count column to Playlists table if it doesn't exist + if db_type == "postgresql": + # Check if episode_count column exists + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'Playlists' + AND column_name = 'episodecount' + """) + column_exists = len(cursor.fetchall()) > 0 + + if not column_exists: + cursor.execute(""" + ALTER TABLE "Playlists" + ADD COLUMN episodecount INTEGER DEFAULT 0 + """) + logger.info("Added episode_count column to Playlists table (PostgreSQL)") + else: + logger.info("episode_count column already exists in Playlists table (PostgreSQL)") + else: + # Check if episode_count column exists (MySQL) + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'Playlists' + AND COLUMN_NAME = 'EpisodeCount' + AND TABLE_SCHEMA = DATABASE() + """) + column_exists = cursor.fetchone()[0] > 0 + + if not column_exists: + cursor.execute(""" + ALTER TABLE Playlists + ADD COLUMN EpisodeCount INT DEFAULT 0 + """) + logger.info("Added EpisodeCount column to Playlists table (MySQL)") + else: + logger.info("EpisodeCount column already exists in Playlists table (MySQL)") + + # Define default playlists (same as migration 012 but will be assigned to each user) + default_playlists = [ + { + 'name': 'Quick Listens', + 'description': 'Short episodes under 15 minutes, perfect for quick breaks', + 'min_duration': 1, # Exclude 0-duration episodes + 'max_duration': 900, # 15 minutes + 'sort_order': 'duration_asc', + 'icon_name': 'ph-fast-forward', + 'max_episodes': 1000 + }, + { + 'name': 'Longform', + 'description': 'Extended episodes over 1 hour, ideal for long drives or deep dives', + 'min_duration': 3600, # 1 hour + 'max_duration': None, + 'sort_order': 'duration_desc', + 'icon_name': 'ph-car', + 'max_episodes': 1000 + }, + { + 'name': 'Currently Listening', + 'description': 'Episodes you\'ve started but haven\'t finished', + 'min_duration': None, + 
'max_duration': None, + 'sort_order': 'date_desc', + 'include_unplayed': False, + 'include_partially_played': True, + 'include_played': False, + 'icon_name': 'ph-play' + }, + { + 'name': 'Fresh Releases', + 'description': 'Latest episodes from the last 24 hours', + 'min_duration': None, + 'max_duration': None, + 'sort_order': 'date_desc', + 'include_unplayed': True, + 'include_partially_played': False, + 'include_played': False, + 'time_filter_hours': 24, + 'icon_name': 'ph-sparkle' + }, + { + 'name': 'Weekend Marathon', + 'description': 'Longer episodes (30+ minutes) perfect for weekend listening', + 'min_duration': 1800, # 30 minutes + 'max_duration': None, + 'sort_order': 'duration_desc', + 'group_by_podcast': True, + 'icon_name': 'ph-couch', + 'max_episodes': 1000 + }, + { + 'name': 'Commuter Mix', + 'description': 'Perfect-length episodes (15-45 minutes) for your daily commute', + 'min_duration': 900, # 15 minutes + 'max_duration': 2700, # 45 minutes + 'sort_order': 'date_desc', + 'icon_name': 'ph-car-simple', + 'max_episodes': 1000 + } + ] + + # Get all existing users (excluding background user if present) + if db_type == "postgresql": + cursor.execute('SELECT userid FROM "Users" WHERE userid > 1') + else: + cursor.execute('SELECT UserID FROM Users WHERE UserID > 1') + + users = cursor.fetchall() + logger.info(f"Found {len(users)} users to create default playlists for") + + # Create default playlists for each user + for user_row in users: + user_id = user_row[0] if isinstance(user_row, tuple) else user_row['userid' if db_type == "postgresql" else 'UserID'] + logger.info(f"Creating default playlists for user {user_id}") + + for playlist in default_playlists: + try: + # Check if this playlist already exists for this user + if db_type == "postgresql": + cursor.execute(""" + SELECT COUNT(*) + FROM "Playlists" + WHERE userid = %s AND name = %s + """, (user_id, playlist['name'])) + else: + cursor.execute(""" + SELECT COUNT(*) + FROM Playlists + WHERE UserID = %s 
AND Name = %s + """, (user_id, playlist['name'])) + + if cursor.fetchone()[0] == 0: + # Create the playlist for this user + if db_type == "postgresql": + cursor.execute(""" + INSERT INTO "Playlists" ( + userid, + name, + description, + issystemplaylist, + minduration, + maxduration, + sortorder, + includeunplayed, + includepartiallyplayed, + includeplayed, + timefilterhours, + groupbypodcast, + maxepisodes, + iconname, + episodecount + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + """, ( + user_id, + playlist['name'], + playlist['description'], + False, # No longer system playlists + playlist.get('min_duration'), + playlist.get('max_duration'), + playlist['sort_order'], + playlist.get('include_unplayed', True), + playlist.get('include_partially_played', True), + playlist.get('include_played', True), + playlist.get('time_filter_hours'), + playlist.get('group_by_podcast', False), + playlist.get('max_episodes'), + playlist['icon_name'], + 0 # Will be updated by scheduled count update + )) + else: + cursor.execute(""" + INSERT INTO Playlists ( + UserID, + Name, + Description, + IsSystemPlaylist, + MinDuration, + MaxDuration, + SortOrder, + IncludeUnplayed, + IncludePartiallyPlayed, + IncludePlayed, + TimeFilterHours, + GroupByPodcast, + MaxEpisodes, + IconName, + EpisodeCount + ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) + """, ( + user_id, + playlist['name'], + playlist['description'], + False, # No longer system playlists + playlist.get('min_duration'), + playlist.get('max_duration'), + playlist['sort_order'], + playlist.get('include_unplayed', True), + playlist.get('include_partially_played', True), + playlist.get('include_played', True), + playlist.get('time_filter_hours'), + playlist.get('group_by_podcast', False), + playlist.get('max_episodes'), + playlist['icon_name'], + 0 # Will be updated by scheduled count update + )) + + logger.info(f"Created playlist '{playlist['name']}' for user {user_id}") + else: + 
logger.info(f"Playlist '{playlist['name']}' already exists for user {user_id}") + + except Exception as e: + logger.error(f"Failed to create playlist '{playlist['name']}' for user {user_id}: {e}") + # Continue with other playlists even if one fails + + # Commit all changes + conn.commit() + logger.info("Successfully created default playlists for all existing users") + + except Exception as e: + logger.error(f"Error in user default playlists migration: {e}") + raise + finally: + cursor.close() + + +# ============================================================================ +# GPODDER SYNC MIGRATIONS +# These migrations match the gpodder-api service migrations from Go code +# ============================================================================ + +@register_migration("100", "gpodder_initial_schema", "Create initial gpodder sync tables") +def migration_100_gpodder_initial_schema(conn, db_type: str): + """Create initial gpodder sync schema - matches Go migration version 1""" + cursor = conn.cursor() + + try: + logger.info("Starting gpodder migration 100: Initial schema creation") + + if db_type == 'postgresql': + # Create all gpodder sync tables for PostgreSQL + tables_sql = [ + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncMigrations" ( + Version INT PRIMARY KEY, + Description TEXT NOT NULL, + AppliedAt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncDeviceState" ( + DeviceStateID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + SubscriptionCount INT DEFAULT 0, + LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncSubscriptions" ( + SubscriptionID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + PodcastURL TEXT NOT NULL, + Action 
VARCHAR(10) NOT NULL, + Timestamp BIGINT NOT NULL, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncEpisodeActions" ( + ActionID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT, + PodcastURL TEXT NOT NULL, + EpisodeURL TEXT NOT NULL, + Action VARCHAR(20) NOT NULL, + Timestamp BIGINT NOT NULL, + Started INT, + Position INT, + Total INT, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncPodcastLists" ( + ListID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + Name VARCHAR(255) NOT NULL, + Title VARCHAR(255) NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + UNIQUE(UserID, Name) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncPodcastListEntries" ( + EntryID SERIAL PRIMARY KEY, + ListID INT NOT NULL, + PodcastURL TEXT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (ListID) REFERENCES "GpodderSyncPodcastLists"(ListID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncDevicePairs" ( + PairID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + DeviceID1 INT NOT NULL, + DeviceID2 INT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID1) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID2) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID1, DeviceID2) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncSettings" ( + SettingID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + Scope VARCHAR(20) NOT NULL, + DeviceID INT, + PodcastURL TEXT, + EpisodeURL TEXT, + 
SettingKey VARCHAR(255) NOT NULL, + SettingValue TEXT, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE + ) + ''' + ] + + # Create indexes + indexes_sql = [ + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subscriptions_userid ON "GpodderSyncSubscriptions"(UserID)', + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subscriptions_deviceid ON "GpodderSyncSubscriptions"(DeviceID)', + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_episode_actions_userid ON "GpodderSyncEpisodeActions"(UserID)', + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_podcast_lists_userid ON "GpodderSyncPodcastLists"(UserID)' + ] + + else: # mysql + # Create all gpodder sync tables for MySQL + tables_sql = [ + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncMigrations ( + Version INT PRIMARY KEY, + Description TEXT NOT NULL, + AppliedAt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncDeviceState ( + DeviceStateID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + SubscriptionCount INT DEFAULT 0, + LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncSubscriptions ( + SubscriptionID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + PodcastURL TEXT NOT NULL, + Action VARCHAR(10) NOT NULL, + Timestamp BIGINT NOT NULL, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncEpisodeActions ( + ActionID INT AUTO_INCREMENT PRIMARY KEY, 
+ UserID INT NOT NULL, + DeviceID INT, + PodcastURL TEXT NOT NULL, + EpisodeURL TEXT NOT NULL, + Action VARCHAR(20) NOT NULL, + Timestamp BIGINT NOT NULL, + Started INT, + Position INT, + Total INT, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncPodcastLists ( + ListID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + Name VARCHAR(255) NOT NULL, + Title VARCHAR(255) NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + UNIQUE(UserID, Name) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncPodcastListEntries ( + EntryID INT AUTO_INCREMENT PRIMARY KEY, + ListID INT NOT NULL, + PodcastURL TEXT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (ListID) REFERENCES GpodderSyncPodcastLists(ListID) ON DELETE CASCADE + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncDevicePairs ( + PairID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + DeviceID1 INT NOT NULL, + DeviceID2 INT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID1) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID2) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID1, DeviceID2) + ) + ''', + ''' + CREATE TABLE IF NOT EXISTS GpodderSyncSettings ( + SettingID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + Scope VARCHAR(20) NOT NULL, + DeviceID INT, + PodcastURL TEXT, + EpisodeURL TEXT, + SettingKey VARCHAR(255) NOT NULL, + SettingValue TEXT, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES 
GpodderDevices(DeviceID) ON DELETE CASCADE + ) + ''' + ] + + # Create indexes + indexes_sql = [ + 'CREATE INDEX idx_gpodder_sync_subscriptions_userid ON GpodderSyncSubscriptions(UserID)', + 'CREATE INDEX idx_gpodder_sync_subscriptions_deviceid ON GpodderSyncSubscriptions(DeviceID)', + 'CREATE INDEX idx_gpodder_sync_episode_actions_userid ON GpodderSyncEpisodeActions(UserID)', + 'CREATE INDEX idx_gpodder_sync_podcast_lists_userid ON GpodderSyncPodcastLists(UserID)' + ] + + # Execute table creation + for sql in tables_sql: + safe_execute_sql(cursor, sql, conn=conn) + + # Execute index creation + for sql in indexes_sql: + safe_execute_sql(cursor, sql, conn=conn) + + logger.info("Created gpodder sync initial schema successfully") + + except Exception as e: + logger.error(f"Error in gpodder migration 100: {e}") + raise + finally: + cursor.close() + + +@register_migration("101", "gpodder_add_api_version", "Add API version column to GpodderSyncSettings") +def migration_101_gpodder_add_api_version(conn, db_type: str): + """Add API version column - matches Go migration version 2""" + cursor = conn.cursor() + + try: + logger.info("Starting gpodder migration 101: Add API version column") + + if db_type == 'postgresql': + safe_execute_sql(cursor, ''' + ALTER TABLE "GpodderSyncSettings" + ADD COLUMN IF NOT EXISTS APIVersion VARCHAR(10) DEFAULT '2.0' + ''', conn=conn) + else: # mysql + # Check if column exists first, then add if it doesn't + cursor.execute(""" + SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'GpodderSyncSettings' + AND COLUMN_NAME = 'APIVersion' + AND TABLE_SCHEMA = DATABASE() + """) + + if cursor.fetchone()[0] == 0: + safe_execute_sql(cursor, ''' + ALTER TABLE GpodderSyncSettings + ADD COLUMN APIVersion VARCHAR(10) DEFAULT '2.0' + ''', conn=conn) + logger.info("Added APIVersion column to GpodderSyncSettings") + else: + logger.info("APIVersion column already exists in GpodderSyncSettings") + + logger.info("Gpodder API version migration 
completed successfully") + + except Exception as e: + logger.error(f"Error in gpodder migration 101: {e}") + raise + finally: + cursor.close() + + +@register_migration("102", "gpodder_create_sessions", "Create GpodderSessions table for API sessions") +def migration_102_gpodder_create_sessions(conn, db_type: str): + """Create GpodderSessions table - matches Go migration version 3""" + cursor = conn.cursor() + + try: + logger.info("Starting gpodder migration 102: Create GpodderSessions table") + + if db_type == 'postgresql': + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS "GpodderSessions" ( + SessionID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + SessionToken TEXT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + ExpiresAt TIMESTAMP NOT NULL, + LastActive TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + UserAgent TEXT, + ClientIP TEXT, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + UNIQUE(SessionToken) + ) + ''', conn=conn) + + # Create indexes + indexes_sql = [ + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_token ON "GpodderSessions"(SessionToken)', + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_userid ON "GpodderSessions"(UserID)', + 'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_expires ON "GpodderSessions"(ExpiresAt)' + ] + else: # mysql + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS GpodderSessions ( + SessionID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + SessionToken TEXT NOT NULL, + CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + ExpiresAt TIMESTAMP NOT NULL, + LastActive TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + UserAgent TEXT, + ClientIP TEXT, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE + ) + ''', conn=conn) + + # Create indexes + indexes_sql = [ + 'CREATE INDEX idx_gpodder_sessions_userid ON GpodderSessions(UserID)', + 'CREATE INDEX idx_gpodder_sessions_expires ON GpodderSessions(ExpiresAt)' + ] + + # Execute index creation + for sql in indexes_sql: + 
safe_execute_sql(cursor, sql, conn=conn) + + logger.info("Created GpodderSessions table successfully") + + except Exception as e: + logger.error(f"Error in gpodder migration 102: {e}") + raise + finally: + cursor.close() + + +@register_migration("103", "gpodder_sync_state_table", "Add sync state table for tracking device sync status") +def migration_103_gpodder_sync_state_table(conn, db_type: str): + """Create GpodderSyncState table - matches Go migration version 4""" + cursor = conn.cursor() + + try: + logger.info("Starting gpodder migration 103: Add sync state table") + + if db_type == 'postgresql': + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS "GpodderSyncState" ( + SyncStateID SERIAL PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + LastTimestamp BIGINT DEFAULT 0, + LastSync TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID) + ) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_syncstate_userid_deviceid ON "GpodderSyncState"(UserID, DeviceID) + ''', conn=conn) + else: # mysql + safe_execute_sql(cursor, ''' + CREATE TABLE IF NOT EXISTS GpodderSyncState ( + SyncStateID INT AUTO_INCREMENT PRIMARY KEY, + UserID INT NOT NULL, + DeviceID INT NOT NULL, + LastTimestamp BIGINT DEFAULT 0, + LastSync TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE, + FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE, + UNIQUE(UserID, DeviceID) + ) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_syncstate_userid_deviceid ON GpodderSyncState(UserID, DeviceID) + ''', conn=conn) + + logger.info("Created GpodderSyncState table successfully") + + except Exception as e: + logger.error(f"Error in gpodder migration 103: {e}") + raise + finally: + 
cursor.close() + + +@register_migration("104", "create_people_episodes_backup", "Skip PeopleEpisodes_backup - varies by installation") +def migration_104_create_people_episodes_backup(conn, db_type: str): + """Skip PeopleEpisodes_backup table - this varies by installation and shouldn't be validated""" + logger.info("Skipping migration 104: PeopleEpisodes_backup table varies by installation") + # This migration is a no-op since backup tables vary by installation + # and shouldn't be part of the expected schema + + +@register_migration("105", "optimize_episode_actions_performance", "Add indexes and optimize episode actions queries") +def migration_105_optimize_episode_actions_performance(conn, db_type: str): + """Add critical indexes for episode actions performance and create optimized views""" + cursor = conn.cursor() + + try: + logger.info("Adding performance indexes for episode actions...") + + if db_type == 'postgresql': + # Critical indexes for episode actions performance + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_episode_actions_user_timestamp + ON "GpodderSyncEpisodeActions"(UserID, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_episode_actions_device_timestamp + ON "GpodderSyncEpisodeActions"(DeviceID, Timestamp DESC) + WHERE DeviceID IS NOT NULL + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_episode_actions_podcast_episode + ON "GpodderSyncEpisodeActions"(UserID, PodcastURL, EpisodeURL, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_episode_actions_since_filter + ON "GpodderSyncEpisodeActions"(UserID, Timestamp DESC, DeviceID) + WHERE Timestamp > 0 + ''', conn=conn) + + # Optimize devices table lookups + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_devices_user_name + ON "GpodderDevices"(UserID, DeviceName) + WHERE IsActive = true + ''', conn=conn) + + else: # 
mysql/mariadb + # Critical indexes for episode actions performance + safe_execute_sql(cursor, ''' + CREATE INDEX idx_episode_actions_user_timestamp + ON GpodderSyncEpisodeActions(UserID, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_episode_actions_device_timestamp + ON GpodderSyncEpisodeActions(DeviceID, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_episode_actions_podcast_episode + ON GpodderSyncEpisodeActions(UserID, PodcastURL(255), EpisodeURL(255), Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_episode_actions_since_filter + ON GpodderSyncEpisodeActions(UserID, Timestamp DESC, DeviceID) + ''', conn=conn) + + # Optimize devices table lookups + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_devices_user_name + ON GpodderDevices(UserID, DeviceName) + ''', conn=conn) + + logger.info("Successfully added episode actions performance indexes") + + except Exception as e: + logger.error(f"Error in gpodder migration 105: {e}") + raise + finally: + cursor.close() + + +@register_migration("106", "optimize_subscription_sync_performance", "Add missing indexes for subscription sync queries", requires=["103"]) +def migration_106_optimize_subscription_sync_performance(conn, db_type: str): + """Add critical indexes for subscription sync performance to prevent AntennaPod timeouts""" + cursor = conn.cursor() + + try: + logger.info("Adding performance indexes for subscription sync...") + + if db_type == 'postgresql': + # Critical indexes for subscription sync performance + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_user_device_timestamp + ON "GpodderSyncSubscriptions"(UserID, DeviceID, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_user_action_timestamp + ON "GpodderSyncSubscriptions"(UserID, Action, Timestamp DESC) + ''', conn=conn) + + 
safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_podcast_url_user + ON "GpodderSyncSubscriptions"(UserID, PodcastURL, Timestamp DESC) + ''', conn=conn) + + # Optimize subscription change queries with compound index + safe_execute_sql(cursor, ''' + CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_complex_query + ON "GpodderSyncSubscriptions"(UserID, DeviceID, Action, Timestamp DESC, PodcastURL) + ''', conn=conn) + + else: # mysql/mariadb + # Critical indexes for subscription sync performance + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_sync_subs_user_device_timestamp + ON GpodderSyncSubscriptions(UserID, DeviceID, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_sync_subs_user_action_timestamp + ON GpodderSyncSubscriptions(UserID, Action, Timestamp DESC) + ''', conn=conn) + + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_sync_subs_podcast_url_user + ON GpodderSyncSubscriptions(UserID, PodcastURL(255), Timestamp DESC) + ''', conn=conn) + + # Optimize subscription change queries with compound index + safe_execute_sql(cursor, ''' + CREATE INDEX idx_gpodder_sync_subs_complex_query + ON GpodderSyncSubscriptions(UserID, DeviceID, Action, Timestamp DESC, PodcastURL(255)) + ''', conn=conn) + + logger.info("Successfully added subscription sync performance indexes") + + except Exception as e: + logger.error(f"Error in gpodder migration 106: {e}") + raise + finally: + cursor.close() + + +@register_migration("033", "add_http_notification_columns", "Add generic HTTP notification columns to UserNotificationSettings table", requires=["011"]) +def migration_033_add_http_notification_columns(conn, db_type: str): + """Add generic HTTP notification columns for platforms like Telegram""" + cursor = conn.cursor() + + try: + if db_type == "postgresql": + # Check if columns already exist (PostgreSQL - lowercase column names) + cursor.execute(""" + SELECT column_name FROM 
information_schema.columns + WHERE table_name = 'UserNotificationSettings' + AND column_name IN ('httpurl', 'httptoken', 'httpmethod') + """) + existing_columns = [row[0] for row in cursor.fetchall()] + + if 'httpurl' not in existing_columns: + cursor.execute(""" + ALTER TABLE "UserNotificationSettings" + ADD COLUMN HttpUrl VARCHAR(500) + """) + logger.info("Added HttpUrl column to UserNotificationSettings table (PostgreSQL)") + + if 'httptoken' not in existing_columns: + cursor.execute(""" + ALTER TABLE "UserNotificationSettings" + ADD COLUMN HttpToken VARCHAR(255) + """) + logger.info("Added HttpToken column to UserNotificationSettings table (PostgreSQL)") + + if 'httpmethod' not in existing_columns: + cursor.execute(""" + ALTER TABLE "UserNotificationSettings" + ADD COLUMN HttpMethod VARCHAR(10) DEFAULT 'POST' + """) + logger.info("Added HttpMethod column to UserNotificationSettings table (PostgreSQL)") + + else: + # Check if columns already exist (MySQL) + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'UserNotificationSettings' + AND column_name = 'HttpUrl' + AND table_schema = DATABASE() + """) + url_exists = cursor.fetchone()[0] > 0 + + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'UserNotificationSettings' + AND column_name = 'HttpToken' + AND table_schema = DATABASE() + """) + token_exists = cursor.fetchone()[0] > 0 + + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'UserNotificationSettings' + AND column_name = 'HttpMethod' + AND table_schema = DATABASE() + """) + method_exists = cursor.fetchone()[0] > 0 + + if not url_exists: + cursor.execute(""" + ALTER TABLE UserNotificationSettings + ADD COLUMN HttpUrl VARCHAR(500) + """) + logger.info("Added HttpUrl column to UserNotificationSettings table (MySQL)") + + if not token_exists: + cursor.execute(""" + ALTER TABLE UserNotificationSettings + ADD COLUMN HttpToken 
VARCHAR(255) + """) + logger.info("Added HttpToken column to UserNotificationSettings table (MySQL)") + + if not method_exists: + cursor.execute(""" + ALTER TABLE UserNotificationSettings + ADD COLUMN HttpMethod VARCHAR(10) DEFAULT 'POST' + """) + logger.info("Added HttpMethod column to UserNotificationSettings table (MySQL)") + + logger.info("HTTP notification columns migration completed successfully") + + finally: + cursor.close() + + +@register_migration("034", "add_podcast_merge_columns", "Add podcast merge columns to support merging podcasts", requires=["033"]) +def migration_034_add_podcast_merge_columns(conn, db_type: str): + """Add DisplayPodcast, RefreshPodcast, and MergedPodcastIDs columns to Podcasts table""" + cursor = conn.cursor() + + try: + if db_type == "postgresql": + # Check if columns already exist (PostgreSQL) + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'Podcasts' + AND column_name IN ('displaypodcast', 'refreshpodcast', 'mergedpodcastids') + """) + existing_columns = [row[0] for row in cursor.fetchall()] + + if 'displaypodcast' not in existing_columns: + cursor.execute(""" + ALTER TABLE "Podcasts" + ADD COLUMN DisplayPodcast BOOLEAN DEFAULT TRUE + """) + logger.info("Added DisplayPodcast column to Podcasts table (PostgreSQL)") + + if 'refreshpodcast' not in existing_columns: + cursor.execute(""" + ALTER TABLE "Podcasts" + ADD COLUMN RefreshPodcast BOOLEAN DEFAULT TRUE + """) + logger.info("Added RefreshPodcast column to Podcasts table (PostgreSQL)") + + if 'mergedpodcastids' not in existing_columns: + cursor.execute(""" + ALTER TABLE "Podcasts" + ADD COLUMN MergedPodcastIDs TEXT + """) + logger.info("Added MergedPodcastIDs column to Podcasts table (PostgreSQL)") + + else: # MySQL + # Check if columns already exist (MySQL) + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'Podcasts' + AND column_name = 'DisplayPodcast' + AND table_schema = DATABASE() + 
""") + display_exists = cursor.fetchone()[0] > 0 + + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'Podcasts' + AND column_name = 'RefreshPodcast' + AND table_schema = DATABASE() + """) + refresh_exists = cursor.fetchone()[0] > 0 + + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.columns + WHERE table_name = 'Podcasts' + AND column_name = 'MergedPodcastIDs' + AND table_schema = DATABASE() + """) + merged_exists = cursor.fetchone()[0] > 0 + + if not display_exists: + cursor.execute(""" + ALTER TABLE Podcasts + ADD COLUMN DisplayPodcast TINYINT(1) DEFAULT 1 + """) + logger.info("Added DisplayPodcast column to Podcasts table (MySQL)") + + if not refresh_exists: + cursor.execute(""" + ALTER TABLE Podcasts + ADD COLUMN RefreshPodcast TINYINT(1) DEFAULT 1 + """) + logger.info("Added RefreshPodcast column to Podcasts table (MySQL)") + + if not merged_exists: + cursor.execute(""" + ALTER TABLE Podcasts + ADD COLUMN MergedPodcastIDs TEXT + """) + logger.info("Added MergedPodcastIDs column to Podcasts table (MySQL)") + + # Add index on DisplayPodcast for performance + table_quote = "`" if db_type != "postgresql" else '"' + safe_add_index(cursor, db_type, + f'CREATE INDEX idx_podcasts_displaypodcast ON {table_quote}Podcasts{table_quote} (DisplayPodcast)', + 'idx_podcasts_displaypodcast') + + logger.info("Podcast merge columns migration completed successfully") + + finally: + cursor.close() + + +@register_migration("035", "add_podcast_cover_preference_columns", "Add podcast cover preference columns to Users and Podcasts tables", requires=["034"]) +def migration_035_add_podcast_cover_preference_columns(conn, db_type: str): + """Add podcast cover preference columns to Users and Podcasts tables for existing installations""" + cursor = conn.cursor() + + try: + # Add UsePodcastCovers to Users table if it doesn't exist + try: + if db_type == "postgresql": + cursor.execute(""" + ALTER TABLE "Users" + ADD COLUMN IF NOT 
EXISTS UsePodcastCovers BOOLEAN DEFAULT FALSE + """) + else: # MySQL/MariaDB + # Check if column exists first + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'Users' + AND COLUMN_NAME = 'UsePodcastCovers' + """) + if cursor.fetchone()[0] == 0: + cursor.execute(""" + ALTER TABLE Users + ADD COLUMN UsePodcastCovers TINYINT(1) DEFAULT 0 + """) + logger.info("Added UsePodcastCovers column to Users table") + else: + logger.info("UsePodcastCovers column already exists in Users table") + + except Exception as e: + logger.error(f"Error adding UsePodcastCovers to Users table: {e}") + + # Add UsePodcastCovers columns to Podcasts table if they don't exist + try: + if db_type == "postgresql": + cursor.execute(""" + ALTER TABLE "Podcasts" + ADD COLUMN IF NOT EXISTS UsePodcastCovers BOOLEAN DEFAULT FALSE, + ADD COLUMN IF NOT EXISTS UsePodcastCoversCustomized BOOLEAN DEFAULT FALSE + """) + else: # MySQL/MariaDB + # Check if UsePodcastCovers column exists + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'Podcasts' + AND COLUMN_NAME = 'UsePodcastCovers' + """) + if cursor.fetchone()[0] == 0: + cursor.execute(""" + ALTER TABLE Podcasts + ADD COLUMN UsePodcastCovers TINYINT(1) DEFAULT 0 + """) + logger.info("Added UsePodcastCovers column to Podcasts table") + else: + logger.info("UsePodcastCovers column already exists in Podcasts table") + + # Check if UsePodcastCoversCustomized column exists + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_SCHEMA = DATABASE() + AND TABLE_NAME = 'Podcasts' + AND COLUMN_NAME = 'UsePodcastCoversCustomized' + """) + if cursor.fetchone()[0] == 0: + cursor.execute(""" + ALTER TABLE Podcasts + ADD COLUMN UsePodcastCoversCustomized TINYINT(1) DEFAULT 0 + """) + logger.info("Added UsePodcastCoversCustomized column to Podcasts table") + else: + 
logger.info("UsePodcastCoversCustomized column already exists in Podcasts table") + + except Exception as e: + logger.error(f"Error adding UsePodcastCovers columns to Podcasts table: {e}") + + logger.info("Podcast cover preference columns migration completed successfully") + + finally: + cursor.close() + + +@register_migration("036", "add_episodecount_column_to_playlists", "Add episodecount column to Playlists table for tracking episode counts", requires=["010"]) +def migration_036_add_episodecount_column(conn, db_type: str): + """Add episodecount column to Playlists table if it doesn't exist + + This migration was needed because migration 032 was applied to existing databases + before the episodecount column addition was added to it. Since migration 032 is + already marked as applied in those databases, the column was never created. + """ + cursor = conn.cursor() + + try: + logger.info("Checking for episodecount column in Playlists table") + + if db_type == "postgresql": + # Check if episodecount column exists + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'Playlists' + AND column_name = 'episodecount' + """) + column_exists = len(cursor.fetchall()) > 0 + + if not column_exists: + cursor.execute(""" + ALTER TABLE "Playlists" + ADD COLUMN episodecount INTEGER DEFAULT 0 + """) + logger.info("Added episodecount column to Playlists table (PostgreSQL)") + else: + logger.info("episodecount column already exists in Playlists table (PostgreSQL)") + else: + # Check if episodecount column exists (MySQL/MariaDB) + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'Playlists' + AND COLUMN_NAME = 'EpisodeCount' + AND TABLE_SCHEMA = DATABASE() + """) + column_exists = cursor.fetchone()[0] > 0 + + if not column_exists: + cursor.execute(""" + ALTER TABLE Playlists + ADD COLUMN EpisodeCount INT DEFAULT 0 + """) + logger.info("Added EpisodeCount column to Playlists table (MySQL/MariaDB)") + 
else: + logger.info("EpisodeCount column already exists in Playlists table (MySQL/MariaDB)") + + logger.info("episodecount column migration completed successfully") + + except Exception as e: + logger.error(f"Error in migration 036: {e}") + raise + finally: + cursor.close() + + +@register_migration("037", "fix_shared_episodes_schema", "Add missing SharedBy and SharedWith columns to SharedEpisodes table", requires=["009"]) +def migration_037_fix_shared_episodes_schema(conn, db_type: str): + """Add missing SharedBy and SharedWith columns to SharedEpisodes table + + Old schema had: EpisodeID, UrlKey, ExpirationDate + New schema needs: EpisodeID, SharedBy, SharedWith, ShareCode, ExpirationDate + """ + cursor = conn.cursor() + + try: + logger.info("Starting SharedEpisodes schema fix migration") + + if db_type == "postgresql": + # Check if sharedby column exists + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'SharedEpisodes' + AND column_name = 'sharedby' + """) + sharedby_exists = len(cursor.fetchall()) > 0 + + if not sharedby_exists: + logger.info("Adding sharedby column to SharedEpisodes table (PostgreSQL)") + cursor.execute(""" + ALTER TABLE "SharedEpisodes" + ADD COLUMN sharedby INTEGER NOT NULL DEFAULT 1 + """) + conn.commit() + + # Check if sharedwith column exists + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'SharedEpisodes' + AND column_name = 'sharedwith' + """) + sharedwith_exists = len(cursor.fetchall()) > 0 + + if not sharedwith_exists: + logger.info("Adding sharedwith column to SharedEpisodes table (PostgreSQL)") + cursor.execute(""" + ALTER TABLE "SharedEpisodes" + ADD COLUMN sharedwith INTEGER + """) + conn.commit() + + # Check if sharecode column exists (might have been UrlKey) + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'SharedEpisodes' + AND column_name = 'sharecode' + """) + sharecode_exists = 
len(cursor.fetchall()) > 0 + + if not sharecode_exists: + # Check if UrlKey exists + cursor.execute(""" + SELECT column_name FROM information_schema.columns + WHERE table_name = 'SharedEpisodes' + AND column_name IN ('UrlKey', 'urlkey') + """) + urlkey_result = cursor.fetchall() + + if urlkey_result: + urlkey_name = urlkey_result[0][0] + logger.info(f"Renaming {urlkey_name} to sharecode (PostgreSQL)") + cursor.execute(f""" + ALTER TABLE "SharedEpisodes" + RENAME COLUMN "{urlkey_name}" TO sharecode + """) + else: + logger.info("Adding sharecode column to SharedEpisodes table (PostgreSQL)") + cursor.execute(""" + ALTER TABLE "SharedEpisodes" + ADD COLUMN sharecode TEXT UNIQUE + """) + conn.commit() + + logger.info("SharedEpisodes schema fix completed (PostgreSQL)") + + else: # MySQL/MariaDB + # Check if SharedBy column exists + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'SharedEpisodes' + AND COLUMN_NAME = 'SharedBy' + AND TABLE_SCHEMA = DATABASE() + """) + sharedby_exists = cursor.fetchone()[0] > 0 + + if not sharedby_exists: + logger.info("Adding SharedBy column to SharedEpisodes table (MySQL)") + cursor.execute(""" + ALTER TABLE SharedEpisodes + ADD COLUMN SharedBy INT NOT NULL DEFAULT 1 + """) + conn.commit() + + # Check if SharedWith column exists + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'SharedEpisodes' + AND COLUMN_NAME = 'SharedWith' + AND TABLE_SCHEMA = DATABASE() + """) + sharedwith_exists = cursor.fetchone()[0] > 0 + + if not sharedwith_exists: + logger.info("Adding SharedWith column to SharedEpisodes table (MySQL)") + cursor.execute(""" + ALTER TABLE SharedEpisodes + ADD COLUMN SharedWith INT + """) + conn.commit() + + # Check if ShareCode column exists (might have been UrlKey) + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'SharedEpisodes' + AND COLUMN_NAME = 'ShareCode' + AND TABLE_SCHEMA = DATABASE() + 
""") + sharecode_exists = cursor.fetchone()[0] > 0 + + if not sharecode_exists: + # Check if UrlKey exists + cursor.execute(""" + SELECT COUNT(*) + FROM INFORMATION_SCHEMA.COLUMNS + WHERE TABLE_NAME = 'SharedEpisodes' + AND COLUMN_NAME = 'UrlKey' + AND TABLE_SCHEMA = DATABASE() + """) + urlkey_exists = cursor.fetchone()[0] > 0 + + if urlkey_exists: + logger.info("Renaming UrlKey to ShareCode (MySQL)") + cursor.execute(""" + ALTER TABLE SharedEpisodes + CHANGE COLUMN UrlKey ShareCode TEXT + """) + else: + logger.info("Adding ShareCode column to SharedEpisodes table (MySQL)") + cursor.execute(""" + ALTER TABLE SharedEpisodes + ADD COLUMN ShareCode TEXT + """) + conn.commit() + + logger.info("SharedEpisodes schema fix completed (MySQL)") + + logger.info("SharedEpisodes schema fix migration completed successfully") + + except Exception as e: + logger.error(f"Error in migration 037: {e}") + raise + finally: + cursor.close() + + +@register_migration("107", "fix_gpodder_episode_actions_antennapod", "Fix existing GPodder episode actions to include Started and Total fields for AntennaPod compatibility", requires=["103"]) +def migration_107_fix_gpodder_episode_actions(conn, db_type: str): + """ + Fix existing GPodder episode actions to be compatible with AntennaPod. + AntennaPod requires all play actions to have Started, Position, and Total fields. + This migration adds those fields by joining with the Episodes table to get duration. 
+ """ + cursor = conn.cursor() + + try: + logger.info("Starting GPodder episode actions fix for AntennaPod compatibility...") + + if db_type == "postgresql": + # First, count how many actions need fixing + cursor.execute(""" + SELECT COUNT(*) + FROM "GpodderSyncEpisodeActions" + WHERE action = 'play' + AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0) + """) + count_result = cursor.fetchone() + actions_to_fix = count_result[0] if count_result else 0 + + logger.info(f"Found {actions_to_fix} play actions that need fixing (PostgreSQL)") + + if actions_to_fix > 0: + # Update from Episodes table join + logger.info("Updating episode actions with duration from Episodes table...") + cursor.execute(""" + UPDATE "GpodderSyncEpisodeActions" AS gsa + SET + started = 0, + total = e.episodeduration + FROM "Episodes" e + WHERE gsa.action = 'play' + AND gsa.episodeurl = e.episodeurl + AND e.episodeduration IS NOT NULL + AND e.episodeduration > 0 + AND (gsa.started IS NULL OR gsa.total IS NULL OR gsa.started < 0 OR gsa.total <= 0) + """) + conn.commit() + + # Fallback: use Position as Total for episodes not in Episodes table + logger.info("Updating remaining actions using Position as fallback for Total...") + cursor.execute(""" + UPDATE "GpodderSyncEpisodeActions" + SET + started = 0, + total = COALESCE(position, 1) + WHERE action = 'play' + AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0) + AND position IS NOT NULL + AND position > 0 + """) + conn.commit() + + # Final cleanup: set minimal valid values for any remaining invalid actions + logger.info("Final cleanup: setting minimal valid values for remaining invalid actions...") + cursor.execute(""" + UPDATE "GpodderSyncEpisodeActions" + SET + started = 0, + total = 1 + WHERE action = 'play' + AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0) + """) + conn.commit() + + # Verify the fix + cursor.execute(""" + SELECT COUNT(*) + FROM "GpodderSyncEpisodeActions" + WHERE action = 
'play' + AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0 OR position <= 0) + """) + remaining_result = cursor.fetchone() + remaining_broken = remaining_result[0] if remaining_result else 0 + + logger.info(f"Fixed {actions_to_fix - remaining_broken} episode actions (PostgreSQL)") + if remaining_broken > 0: + logger.warning(f"{remaining_broken} actions still have invalid fields - these may need manual review") + else: + logger.info("No actions need fixing (PostgreSQL)") + + else: # MySQL/MariaDB + # First, count how many actions need fixing + cursor.execute(""" + SELECT COUNT(*) + FROM GpodderSyncEpisodeActions + WHERE Action = 'play' + AND (Started IS NULL OR Total IS NULL OR Started < 0 OR Total <= 0) + """) + count_result = cursor.fetchone() + actions_to_fix = count_result[0] if count_result else 0 + + logger.info(f"Found {actions_to_fix} play actions that need fixing (MySQL)") + + if actions_to_fix > 0: + # MySQL: Update using JOIN + logger.info("Updating episode actions with duration from Episodes table...") + cursor.execute(""" + UPDATE GpodderSyncEpisodeActions AS gsa + LEFT JOIN Episodes e ON gsa.EpisodeURL = e.EpisodeURL + AND e.EpisodeDuration IS NOT NULL + AND e.EpisodeDuration > 0 + SET + gsa.Started = 0, + gsa.Total = COALESCE(e.EpisodeDuration, gsa.Position, 1) + WHERE gsa.Action = 'play' + AND (gsa.Started IS NULL OR gsa.Total IS NULL OR gsa.Started < 0 OR gsa.Total <= 0) + """) + conn.commit() + + # Verify the fix + cursor.execute(""" + SELECT COUNT(*) + FROM GpodderSyncEpisodeActions + WHERE Action = 'play' + AND (Started IS NULL OR Total IS NULL OR Started < 0 OR Total <= 0 OR Position <= 0) + """) + remaining_result = cursor.fetchone() + remaining_broken = remaining_result[0] if remaining_result else 0 + + logger.info(f"Fixed {actions_to_fix - remaining_broken} episode actions (MySQL)") + if remaining_broken > 0: + logger.warning(f"{remaining_broken} actions still have invalid fields - these may need manual review") + else: + 
logger.info("No actions need fixing (MySQL)") + + logger.info("GPodder episode actions fix migration completed successfully") + logger.info("AntennaPod should now be able to sync episode actions correctly") + + except Exception as e: + logger.error(f"Error in migration 107: {e}") + raise + finally: + cursor.close() + + if __name__ == "__main__": # Register all migrations and run them register_all_migrations() diff --git a/database_functions/migrations.py b/database_functions/migrations.py index 0ad1dad5..6a08787f 100644 --- a/database_functions/migrations.py +++ b/database_functions/migrations.py @@ -24,10 +24,14 @@ POSTGRES_AVAILABLE = False try: - import mysql.connector + import mariadb as mysql_connector MYSQL_AVAILABLE = True except ImportError: - MYSQL_AVAILABLE = False + try: + import mysql.connector + MYSQL_AVAILABLE = True + except ImportError: + MYSQL_AVAILABLE = False logger = logging.getLogger(__name__) @@ -78,8 +82,17 @@ def get_connection(self): self._connection = psycopg.connect(**self.connection_params) elif self.db_type == 'mysql': if not MYSQL_AVAILABLE: - raise ImportError("mysql-connector-python not available for MySQL connections") - self._connection = mysql.connector.connect(**self.connection_params) + raise ImportError("MariaDB/MySQL connector not available for MySQL connections") + # Use MariaDB connector parameters + mysql_params = self.connection_params.copy() + # Convert mysql.connector parameter names to mariadb parameter names + if 'connection_timeout' in mysql_params: + mysql_params['connect_timeout'] = mysql_params.pop('connection_timeout') + if 'charset' in mysql_params: + mysql_params.pop('charset') # MariaDB connector doesn't use charset parameter + if 'collation' in mysql_params: + mysql_params.pop('collation') # MariaDB connector doesn't use collation parameter + self._connection = mysql_connector.connect(**mysql_params) return self._connection @@ -304,6 +317,60 @@ def detect_existing_schema(self) -> List[str]: 
applied.append("004") logger.info("Detected existing schema for migration 004") + # Check for gpodder tables - if ANY exist, ALL gpodder migrations are applied + # (since they were created by the Go gpodder-api service and haven't changed) + gpodder_indicator_tables = ['"GpodderSyncMigrations"', '"GpodderSyncDeviceState"', + '"GpodderSyncSubscriptions"', '"GpodderSyncSettings"', + '"GpodderSessions"', '"GpodderSyncState"'] + gpodder_migration_versions = ["100", "101", "102", "103", "104"] + + gpodder_tables_exist = False + for table in gpodder_indicator_tables: + table_name = table.strip('"') + if self.db_type == 'postgresql': + cursor.execute(""" + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = 'public' AND table_name = %s + ) + """, (table_name,)) + else: # mysql + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.tables + WHERE table_schema = DATABASE() AND table_name = %s + """, (table_name,)) + + if cursor.fetchone()[0]: + gpodder_tables_exist = True + break + + if gpodder_tables_exist: + for version in gpodder_migration_versions: + if version not in applied: + applied.append(version) + logger.info(f"Detected existing gpodder tables, marking migration {version} as applied") + + # Check for PeopleEpisodes_backup table separately (migration 104) + backup_table = "PeopleEpisodes_backup" + if self.db_type == 'postgresql': + cursor.execute(""" + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_schema = 'public' AND table_name = %s + ) + """, (backup_table,)) + else: # mysql + cursor.execute(""" + SELECT COUNT(*) + FROM information_schema.tables + WHERE table_schema = DATABASE() AND table_name = %s + """, (backup_table,)) + + if cursor.fetchone()[0] and "104" not in applied: + applied.append("104") + logger.info("Detected existing PeopleEpisodes_backup table, marking migration 104 as applied") + return applied except Exception as e: diff --git a/database_functions/mp3_metadata.py 
b/database_functions/mp3_metadata.py deleted file mode 100644 index eb48725f..00000000 --- a/database_functions/mp3_metadata.py +++ /dev/null @@ -1,65 +0,0 @@ -from mutagen.easyid3 import EasyID3 -from mutagen.id3 import ID3, APIC, ID3NoHeaderError -from mutagen.mp3 import MP3 -import mutagen -import requests - -def add_podcast_metadata(file_path, metadata): - """ - Add metadata to a downloaded podcast MP3 file. - - Args: - file_path (str): Path to the MP3 file - metadata (dict): Dictionary containing metadata with keys: - - title: Episode title - - artist: Podcast author/creator - - album: Podcast name - - date: Publication date - - description: Episode description - - artwork_url: URL to episode/podcast artwork - """ - try: - # Try to load existing ID3 tags, create them if they don't exist - try: - audio = EasyID3(file_path) - except mutagen.id3.ID3NoHeaderError: - audio = MP3(file_path) - audio.add_tags() - audio.save() - audio = EasyID3(file_path) - - # Add basic text metadata using valid EasyID3 keys - if 'title' in metadata: - audio['title'] = metadata['title'] - if 'artist' in metadata: - audio['artist'] = metadata['artist'] - if 'album' in metadata: - audio['album'] = metadata['album'] - if 'date' in metadata: - audio['date'] = metadata['date'] - - # Save the text metadata - audio.save() - - # Handle artwork separately (requires full ID3) - if 'artwork_url' in metadata and metadata['artwork_url']: - try: - # Download artwork - artwork_response = requests.get(metadata['artwork_url']) - artwork_response.raise_for_status() - - # Add artwork to the file - audio = ID3(file_path) - audio.add(APIC( - encoding=3, # UTF-8 - mime='image/jpeg', # Assume JPEG - type=3, # Cover image - desc='Cover', - data=artwork_response.content - )) - audio.save() - except Exception as e: - print(f"Failed to add artwork: {str(e)}") - - except Exception as e: - print(f"Failed to add metadata to {file_path}: {str(e)}") diff --git a/database_functions/oidc_state_manager.py 
b/database_functions/oidc_state_manager.py deleted file mode 100644 index a38d2840..00000000 --- a/database_functions/oidc_state_manager.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Optional -from database_functions.valkey_client import valkey_client - -class OIDCStateManager: - def store_state(self, state: str, client_id: str) -> bool: - """Store OIDC state and client_id with 10 minute expiration""" - try: - key = f"oidc_state:{state}" - success = valkey_client.set(key, client_id) - if success: - valkey_client.expire(key, 600) # 10 minutes - return success - except Exception as e: - print(f"Error storing OIDC state: {e}") - return False - - def get_client_id(self, state: str) -> Optional[str]: - """Get client_id for state and delete it after retrieval""" - try: - key = f"oidc_state:{state}" - client_id = valkey_client.get(key) - if client_id: - valkey_client.delete(key) - return client_id - except Exception as e: - print(f"Error getting OIDC state: {e}") - return None - -oidc_state_manager = OIDCStateManager() diff --git a/database_functions/tasks.py b/database_functions/tasks.py index 8eec4f58..1d7a388c 100644 --- a/database_functions/tasks.py +++ b/database_functions/tasks.py @@ -307,14 +307,16 @@ def get_direct_db_connection(): conninfo = f"host={db_host} port={db_port} user={db_user} password={db_password} dbname={db_name}" return psycopg.connect(conninfo) else: # Default to MariaDB/MySQL - import mysql.connector - return mysql.connector.connect( + try: + import mariadb as mysql_connector + except ImportError: + import mysql.connector + return mysql_connector.connect( host=db_host, port=db_port, user=db_user, password=db_password, - database=db_name, - collation="utf8mb4_general_ci" + database=db_name ) def close_direct_db_connection(cnx): diff --git a/database_functions/validate_database.py b/database_functions/validate_database.py new file mode 100644 index 00000000..c0ab82f7 --- /dev/null +++ b/database_functions/validate_database.py @@ -0,0 +1,778 
@@ +#!/usr/bin/env python3 +""" +Database Validator for PinePods + +This script validates that an existing database matches the expected schema +by using the migration system as the source of truth. + +Usage: + python validate_database.py --db-type mysql --db-host localhost --db-port 3306 --db-user root --db-password pass --db-name pinepods_database + python validate_database.py --db-type postgresql --db-host localhost --db-port 5432 --db-user postgres --db-password pass --db-name pinepods_database +""" + +import argparse +import sys +import os +import tempfile +import logging +from typing import Dict, List, Set, Tuple, Any, Optional +from dataclasses import dataclass +import importlib.util + +# Add the parent directory to path so we can import database_functions +parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +sys.path.insert(0, parent_dir) +sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) + +try: + import mysql.connector + MYSQL_AVAILABLE = True +except ImportError: + MYSQL_AVAILABLE = False + +try: + import psycopg + POSTGRESQL_AVAILABLE = True +except ImportError: + POSTGRESQL_AVAILABLE = False + +from database_functions.migrations import get_migration_manager + + +@dataclass +class TableInfo: + """Information about a database table""" + name: str + columns: Dict[str, Dict[str, Any]] + indexes: Dict[str, Dict[str, Any]] + constraints: Dict[str, Dict[str, Any]] + + +@dataclass +class ValidationResult: + """Result of database validation""" + is_valid: bool + missing_tables: List[str] + extra_tables: List[str] + table_differences: Dict[str, Dict[str, Any]] + missing_indexes: List[Tuple[str, str]] # (table, index) + extra_indexes: List[Tuple[str, str]] + missing_constraints: List[Tuple[str, str]] # (table, constraint) + extra_constraints: List[Tuple[str, str]] + column_differences: Dict[str, Dict[str, Dict[str, Any]]] # table -> column -> differences + + +class DatabaseInspector: + """Base class for database inspection""" + + 
def __init__(self, connection): + self.connection = connection + + def get_tables(self) -> Set[str]: + """Get all table names""" + raise NotImplementedError + + def get_table_info(self, table_name: str) -> TableInfo: + """Get detailed information about a table""" + raise NotImplementedError + + def get_all_table_info(self) -> Dict[str, TableInfo]: + """Get information about all tables""" + tables = {} + for table_name in self.get_tables(): + tables[table_name] = self.get_table_info(table_name) + return tables + + +class MySQLInspector(DatabaseInspector): + """MySQL database inspector""" + + def get_tables(self) -> Set[str]: + cursor = self.connection.cursor() + cursor.execute("SHOW TABLES") + tables = {row[0] for row in cursor.fetchall()} + cursor.close() + return tables + + def get_table_info(self, table_name: str) -> TableInfo: + cursor = self.connection.cursor(dictionary=True) + + # Get column information + cursor.execute(f"DESCRIBE `{table_name}`") + columns = {} + for row in cursor.fetchall(): + columns[row['Field']] = { + 'type': row['Type'], + 'null': row['Null'], + 'key': row['Key'], + 'default': row['Default'], + 'extra': row['Extra'] + } + + # Get index information + cursor.execute(f"SHOW INDEX FROM `{table_name}`") + indexes = {} + for row in cursor.fetchall(): + index_name = row['Key_name'] + if index_name not in indexes: + indexes[index_name] = { + 'columns': [], + 'unique': not row['Non_unique'], + 'type': row['Index_type'] + } + indexes[index_name]['columns'].append(row['Column_name']) + + # Get constraint information (foreign keys, etc.) 
+ cursor.execute(f""" + SELECT kcu.CONSTRAINT_NAME, tc.CONSTRAINT_TYPE, kcu.COLUMN_NAME, + kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME + FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu + JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc + ON kcu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME + AND kcu.TABLE_SCHEMA = tc.TABLE_SCHEMA + WHERE kcu.TABLE_SCHEMA = DATABASE() AND kcu.TABLE_NAME = %s + AND kcu.REFERENCED_TABLE_NAME IS NOT NULL + """, (table_name,)) + + constraints = {} + for row in cursor.fetchall(): + constraint_name = row['CONSTRAINT_NAME'] + constraints[constraint_name] = { + 'type': 'FOREIGN KEY', + 'column': row['COLUMN_NAME'], + 'referenced_table': row['REFERENCED_TABLE_NAME'], + 'referenced_column': row['REFERENCED_COLUMN_NAME'] + } + + cursor.close() + return TableInfo(table_name, columns, indexes, constraints) + + +class PostgreSQLInspector(DatabaseInspector): + """PostgreSQL database inspector""" + + def get_tables(self) -> Set[str]: + cursor = self.connection.cursor() + cursor.execute(""" + SELECT table_name + FROM information_schema.tables + WHERE table_schema = 'public' AND table_type = 'BASE TABLE' + """) + tables = {row[0] for row in cursor.fetchall()} + cursor.close() + return tables + + def get_table_info(self, table_name: str) -> TableInfo: + cursor = self.connection.cursor() + + # Get column information + cursor.execute(""" + SELECT column_name, data_type, is_nullable, column_default, + character_maximum_length, numeric_precision, numeric_scale + FROM information_schema.columns + WHERE table_schema = 'public' AND table_name = %s + ORDER BY ordinal_position + """, (table_name,)) + + columns = {} + for row in cursor.fetchall(): + col_name, data_type, is_nullable, default, max_length, precision, scale = row + type_str = data_type + if max_length: + type_str += f"({max_length})" + elif precision: + if scale: + type_str += f"({precision},{scale})" + else: + type_str += f"({precision})" + + columns[col_name] = { + 'type': type_str, + 'null': 
is_nullable, + 'default': default, + 'max_length': max_length, + 'precision': precision, + 'scale': scale + } + + # Get index information + cursor.execute(""" + SELECT i.relname as index_name, + array_agg(a.attname ORDER BY c.ordinality) as columns, + ix.indisunique as is_unique, + ix.indisprimary as is_primary + FROM pg_class t + JOIN pg_index ix ON t.oid = ix.indrelid + JOIN pg_class i ON i.oid = ix.indexrelid + JOIN unnest(ix.indkey) WITH ORDINALITY c(colnum, ordinality) ON true + JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = c.colnum + WHERE t.relname = %s AND t.relkind = 'r' + GROUP BY i.relname, ix.indisunique, ix.indisprimary + """, (table_name,)) + + indexes = {} + for row in cursor.fetchall(): + index_name, columns_list, is_unique, is_primary = row + indexes[index_name] = { + 'columns': columns_list, + 'unique': is_unique, + 'primary': is_primary + } + + # Get constraint information + cursor.execute(""" + SELECT con.conname as constraint_name, + con.contype as constraint_type, + array_agg(att.attname) as columns, + cl.relname as referenced_table, + array_agg(att2.attname) as referenced_columns + FROM pg_constraint con + JOIN pg_class t ON con.conrelid = t.oid + JOIN pg_attribute att ON att.attrelid = t.oid AND att.attnum = ANY(con.conkey) + LEFT JOIN pg_class cl ON con.confrelid = cl.oid + LEFT JOIN pg_attribute att2 ON att2.attrelid = cl.oid AND att2.attnum = ANY(con.confkey) + WHERE t.relname = %s + GROUP BY con.conname, con.contype, cl.relname + """, (table_name,)) + + constraints = {} + for row in cursor.fetchall(): + constraint_name, constraint_type, columns_list, ref_table, ref_columns = row + constraints[constraint_name] = { + 'type': constraint_type, + 'columns': columns_list, + 'referenced_table': ref_table, + 'referenced_columns': ref_columns + } + + cursor.close() + return TableInfo(table_name, columns, indexes, constraints) + + +class DatabaseValidator: + """Main database validator class""" + + def __init__(self, db_type: str, 
db_config: Dict[str, Any]): + self.db_type = db_type.lower() + # Normalize mariadb to mysql since they use the same connector + if self.db_type == 'mariadb': + self.db_type = 'mysql' + self.db_config = db_config + self.logger = logging.getLogger(__name__) + + def create_test_database(self) -> Tuple[Any, str]: + """Create a temporary database and run all migrations""" + if self.db_type == 'mysql': + return self._create_mysql_test_db() + elif self.db_type == 'postgresql': + return self._create_postgresql_test_db() + else: + raise ValueError(f"Unsupported database type: {self.db_type}") + + def _create_mysql_test_db(self) -> Tuple[Any, str]: + """Create MySQL test database""" + if not MYSQL_AVAILABLE: + raise ImportError("mysql-connector-python is required for MySQL validation") + + # Create temporary database name + import uuid + test_db_name = f"pinepods_test_{uuid.uuid4().hex[:8]}" + + # Connect to MySQL server + config = self.db_config.copy() + config.pop('database', None) # Remove database from config + config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues + + conn = mysql.connector.connect(**config) + cursor = conn.cursor() + + try: + # Create test database + cursor.execute(f"CREATE DATABASE `{test_db_name}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci") + cursor.execute(f"USE `{test_db_name}`") + cursor.close() + + # Run all migrations + self._run_migrations(conn, 'mysql') + + # Create a fresh connection to the test database for schema inspection + config['database'] = test_db_name + test_conn = mysql.connector.connect(**config) + + # Close the migration connection + conn.close() + + return test_conn, test_db_name + + except Exception as e: + if cursor: + cursor.close() + if conn: + conn.close() + raise e + + def _create_postgresql_test_db(self) -> Tuple[Any, str]: + """Create PostgreSQL test database""" + if not POSTGRESQL_AVAILABLE: + raise ImportError("psycopg is required for PostgreSQL validation") + + # Create temporary 
database name + import uuid + test_db_name = f"pinepods_test_{uuid.uuid4().hex[:8]}" + + # Connect to PostgreSQL server + config = self.db_config.copy() + config.pop('dbname', None) # Remove database from config + config['dbname'] = 'postgres' # Connect to default database + + conn = psycopg.connect(**config) + conn.autocommit = True + cursor = conn.cursor() + + try: + # Create test database + cursor.execute(f'CREATE DATABASE "{test_db_name}"') + cursor.close() + conn.close() + + # Connect to the new test database + config['dbname'] = test_db_name + test_conn = psycopg.connect(**config) + test_conn.autocommit = True + + # Run all migrations + self._run_migrations(test_conn, 'postgresql') + + return test_conn, test_db_name + + except Exception as e: + cursor.close() + conn.close() + raise e + + def _run_migrations(self, conn: Any, db_type: str): + """Run all migrations on the test database using existing migration system""" + # Set environment variables for the migration manager + import os + original_env = {} + + try: + # Backup original environment + for key in ['DB_TYPE', 'DB_HOST', 'DB_PORT', 'DB_USER', 'DB_PASSWORD', 'DB_NAME']: + original_env[key] = os.environ.get(key) + + # Set environment for test database + if db_type == 'mysql': + os.environ['DB_TYPE'] = 'mysql' + os.environ['DB_HOST'] = 'localhost' # We'll override the connection + os.environ['DB_PORT'] = '3306' + os.environ['DB_USER'] = 'test' + os.environ['DB_PASSWORD'] = 'test' + os.environ['DB_NAME'] = 'test' + else: + os.environ['DB_TYPE'] = 'postgresql' + os.environ['DB_HOST'] = 'localhost' + os.environ['DB_PORT'] = '5432' + os.environ['DB_USER'] = 'test' + os.environ['DB_PASSWORD'] = 'test' + os.environ['DB_NAME'] = 'test' + + # Import and register migrations + import database_functions.migration_definitions + + # Get migration manager and override its connection + manager = get_migration_manager() + manager._connection = conn + + # Run all migrations + success = manager.run_migrations() + if not 
success: + raise RuntimeError("Failed to apply migrations") + + finally: + # Restore original environment + for key, value in original_env.items(): + if value is not None: + os.environ[key] = value + elif key in os.environ: + del os.environ[key] + + def validate_database(self) -> ValidationResult: + """Validate the actual database against the expected schema""" + # Create test database with perfect schema + test_conn, test_db_name = self.create_test_database() + + try: + # Connect to actual database + actual_conn = self._connect_to_actual_database() + + try: + # Get schema information from both databases + if self.db_type == 'mysql': + expected_inspector = MySQLInspector(test_conn) + actual_inspector = MySQLInspector(actual_conn) + # Extract schemas + expected_schema = expected_inspector.get_all_table_info() + actual_schema = actual_inspector.get_all_table_info() + else: + # For PostgreSQL, create fresh connection for expected schema since migration manager closes it + fresh_test_conn = psycopg.connect( + host=self.db_config['host'], + port=self.db_config['port'], + user=self.db_config['user'], + password=self.db_config['password'], + dbname=test_db_name + ) + fresh_test_conn.autocommit = True + + try: + expected_inspector = PostgreSQLInspector(fresh_test_conn) + actual_inspector = PostgreSQLInspector(actual_conn) + + # Extract schemas + expected_schema = expected_inspector.get_all_table_info() + actual_schema = actual_inspector.get_all_table_info() + finally: + fresh_test_conn.close() + + # DEBUG: Print what we're actually comparing + print(f"\n🔍 DEBUG: Expected schema has {len(expected_schema)} tables:") + for table in sorted(expected_schema.keys()): + cols = list(expected_schema[table].columns.keys()) + print(f" {table}: {len(cols)} columns - {', '.join(cols[:5])}{'...' 
if len(cols) > 5 else ''}") + + print(f"\n🔍 DEBUG: Actual schema has {len(actual_schema)} tables:") + for table in sorted(actual_schema.keys()): + cols = list(actual_schema[table].columns.keys()) + print(f" {table}: {len(cols)} columns - {', '.join(cols[:5])}{'...' if len(cols) > 5 else ''}") + + # Check specifically for Playlists table + if 'Playlists' in expected_schema and 'Playlists' in actual_schema: + exp_cols = set(expected_schema['Playlists'].columns.keys()) + act_cols = set(actual_schema['Playlists'].columns.keys()) + print(f"\n🔍 DEBUG: Playlists comparison:") + print(f" Expected columns: {sorted(exp_cols)}") + print(f" Actual columns: {sorted(act_cols)}") + print(f" Missing from actual: {sorted(exp_cols - act_cols)}") + print(f" Extra in actual: {sorted(act_cols - exp_cols)}") + + # Compare schemas + result = self._compare_schemas(expected_schema, actual_schema) + + return result + + finally: + actual_conn.close() + + finally: + # Clean up test database - this will close test_conn + self._cleanup_test_database(test_conn, test_db_name) + + def _connect_to_actual_database(self) -> Any: + """Connect to the actual database""" + if self.db_type == 'mysql': + config = self.db_config.copy() + # Ensure autocommit is enabled for MySQL + config['autocommit'] = True + config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues + return mysql.connector.connect(**config) + else: + return psycopg.connect(**self.db_config) + + def _cleanup_test_database(self, test_conn: Any, test_db_name: str): + """Clean up the test database""" + try: + # Close the test connection first + if test_conn: + test_conn.close() + + if self.db_type == 'mysql': + config = self.db_config.copy() + config.pop('database', None) + config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues + cleanup_conn = mysql.connector.connect(**config) + cursor = cleanup_conn.cursor() + cursor.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`") + 
cursor.close() + cleanup_conn.close() + else: + config = self.db_config.copy() + config.pop('dbname', None) + config['dbname'] = 'postgres' + cleanup_conn = psycopg.connect(**config) + cleanup_conn.autocommit = True + cursor = cleanup_conn.cursor() + cursor.execute(f'DROP DATABASE IF EXISTS "{test_db_name}"') + cursor.close() + cleanup_conn.close() + except Exception as e: + self.logger.warning(f"Failed to clean up test database {test_db_name}: {e}") + + def _compare_schemas(self, expected: Dict[str, TableInfo], actual: Dict[str, TableInfo]) -> ValidationResult: + """Compare expected and actual database schemas""" + expected_tables = set(expected.keys()) + actual_tables = set(actual.keys()) + + missing_tables = list(expected_tables - actual_tables) + extra_tables = list(actual_tables - expected_tables) + + table_differences = {} + missing_indexes = [] + extra_indexes = [] + missing_constraints = [] + extra_constraints = [] + column_differences = {} + + # Compare common tables + common_tables = expected_tables & actual_tables + for table_name in common_tables: + expected_table = expected[table_name] + actual_table = actual[table_name] + + # Compare columns + table_col_diffs = self._compare_columns(expected_table.columns, actual_table.columns) + if table_col_diffs: + column_differences[table_name] = table_col_diffs + + # Compare indexes + expected_indexes = set(expected_table.indexes.keys()) + actual_indexes = set(actual_table.indexes.keys()) + + for missing_idx in expected_indexes - actual_indexes: + missing_indexes.append((table_name, missing_idx)) + for extra_idx in actual_indexes - expected_indexes: + extra_indexes.append((table_name, extra_idx)) + + # Compare constraints + expected_constraints = set(expected_table.constraints.keys()) + actual_constraints = set(actual_table.constraints.keys()) + + for missing_const in expected_constraints - actual_constraints: + missing_constraints.append((table_name, missing_const)) + for extra_const in actual_constraints - 
expected_constraints: + extra_constraints.append((table_name, extra_const)) + + # Only fail on critical issues: + # - Missing tables (CRITICAL) + # - Missing columns (CRITICAL) + # Extra tables, extra columns, and type differences are warnings only + critical_issues = [] + critical_issues.extend(missing_tables) + + # Check for missing columns (critical) - but only in expected tables + for table, col_diffs in column_differences.items(): + # Skip extra tables entirely - they shouldn't be validated + if table in extra_tables: + continue + + for col, diff in col_diffs.items(): + if diff['status'] == 'missing': + critical_issues.append(f"missing column {col} in table {table}") + + is_valid = len(critical_issues) == 0 + + return ValidationResult( + is_valid=is_valid, + missing_tables=missing_tables, + extra_tables=extra_tables, + table_differences=table_differences, + missing_indexes=missing_indexes, + extra_indexes=extra_indexes, + missing_constraints=missing_constraints, + extra_constraints=extra_constraints, + column_differences=column_differences + ) + + def _compare_columns(self, expected: Dict[str, Dict[str, Any]], actual: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]: + """Compare column definitions between expected and actual""" + differences = {} + + expected_cols = set(expected.keys()) + actual_cols = set(actual.keys()) + + # Missing columns + for missing_col in expected_cols - actual_cols: + differences[missing_col] = {'status': 'missing', 'expected': expected[missing_col]} + + # Extra columns + for extra_col in actual_cols - expected_cols: + differences[extra_col] = {'status': 'extra', 'actual': actual[extra_col]} + + # Different columns + for col_name in expected_cols & actual_cols: + expected_col = expected[col_name] + actual_col = actual[col_name] + + col_diffs = {} + for key in expected_col: + if key in actual_col and expected_col[key] != actual_col[key]: + col_diffs[key] = {'expected': expected_col[key], 'actual': actual_col[key]} + + if 
col_diffs: + differences[col_name] = {'status': 'different', 'differences': col_diffs} + + return differences + + +def print_validation_report(result: ValidationResult): + """Print a detailed validation report""" + print("=" * 80) + print("DATABASE VALIDATION REPORT") + print("=" * 80) + + # Count critical vs warning issues + critical_issues = [] + warning_issues = [] + + # Missing tables are critical + critical_issues.extend(result.missing_tables) + + # Missing columns are critical, others are warnings + for table, col_diffs in result.column_differences.items(): + for col, diff in col_diffs.items(): + if diff['status'] == 'missing': + critical_issues.append(f"Missing column {col} in table {table}") + else: + warning_issues.append((table, col, diff)) + + # Extra tables are warnings + warning_issues.extend([('EXTRA_TABLE', table, None) for table in result.extra_tables]) + + if result.is_valid: + if warning_issues: + print("✅ DATABASE IS VALID - No critical issues found!") + print("⚠️ Some warnings exist but don't affect functionality") + else: + print("✅ DATABASE IS PERFECT - All checks passed!") + else: + print("❌ DATABASE VALIDATION FAILED - Critical issues found") + + print() + + # Show critical issues + if critical_issues: + print("🔴 CRITICAL ISSUES (MUST BE FIXED):") + if result.missing_tables: + print(" Missing Tables:") + for table in result.missing_tables: + print(f" - {table}") + + # Show missing columns + for table, col_diffs in result.column_differences.items(): + missing_cols = [col for col, diff in col_diffs.items() if diff['status'] == 'missing'] + if missing_cols: + print(f" Missing Columns in {table}:") + for col in missing_cols: + print(f" - {col}") + print() + + # Show warnings + if warning_issues: + print("⚠️ WARNINGS (ACCEPTABLE DIFFERENCES):") + + if result.extra_tables: + print(" Extra Tables (ignored):") + for table in result.extra_tables: + print(f" - {table}") + + # Show column warnings + for table, col_diffs in 
result.column_differences.items(): + table_warnings = [] + for col, diff in col_diffs.items(): + if diff['status'] == 'extra': + table_warnings.append(f"Extra column: {col}") + elif diff['status'] == 'different': + details = [] + for key, values in diff['differences'].items(): + details.append(f"{key}: {values}") + table_warnings.append(f"Different column {col}: {', '.join(details)}") + + if table_warnings: + print(f" Table {table}:") + for warning in table_warnings: + print(f" - {warning}") + print() + + if result.missing_indexes: + print("🟡 MISSING INDEXES:") + for table, index in result.missing_indexes: + print(f" - {table}.{index}") + print() + + if result.extra_indexes: + print("🟡 EXTRA INDEXES:") + for table, index in result.extra_indexes: + print(f" - {table}.{index}") + print() + + if result.missing_constraints: + print("🟡 MISSING CONSTRAINTS:") + for table, constraint in result.missing_constraints: + print(f" - {table}.{constraint}") + print() + + if result.extra_constraints: + print("🟡 EXTRA CONSTRAINTS:") + for table, constraint in result.extra_constraints: + print(f" - {table}.{constraint}") + print() + + +def main(): + """Main function""" + parser = argparse.ArgumentParser(description='Validate PinePods database schema') + parser.add_argument('--db-type', required=True, choices=['mysql', 'mariadb', 'postgresql'], help='Database type') + parser.add_argument('--db-host', required=True, help='Database host') + parser.add_argument('--db-port', required=True, type=int, help='Database port') + parser.add_argument('--db-user', required=True, help='Database user') + parser.add_argument('--db-password', required=True, help='Database password') + parser.add_argument('--db-name', required=True, help='Database name') + parser.add_argument('--verbose', '-v', action='store_true', help='Enable verbose logging') + + args = parser.parse_args() + + # Set up logging + level = logging.DEBUG if args.verbose else logging.INFO + logging.basicConfig(level=level, 
format='%(asctime)s - %(levelname)s - %(message)s') + + # Build database config + if args.db_type in ['mysql', 'mariadb']: + db_config = { + 'host': args.db_host, + 'port': args.db_port, + 'user': args.db_user, + 'password': args.db_password, + 'database': args.db_name, + 'charset': 'utf8mb4', + 'collation': 'utf8mb4_unicode_ci' + } + else: # postgresql + db_config = { + 'host': args.db_host, + 'port': args.db_port, + 'user': args.db_user, + 'password': args.db_password, + 'dbname': args.db_name + } + + try: + # Create validator and run validation + validator = DatabaseValidator(args.db_type, db_config) + result = validator.validate_database() + + # Print report + print_validation_report(result) + + # Exit with appropriate code + sys.exit(0 if result.is_valid else 1) + + except Exception as e: + logging.error(f"Validation failed with error: {e}") + if args.verbose: + import traceback + traceback.print_exc() + sys.exit(2) + + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/database_functions/valkey_client.py b/database_functions/valkey_client.py deleted file mode 100644 index 09970a61..00000000 --- a/database_functions/valkey_client.py +++ /dev/null @@ -1,64 +0,0 @@ -import os -import redis -from redis.exceptions import RedisError - -class ValkeyClient: - def __init__(self): - self.host = os.environ.get("VALKEY_HOST", "localhost") - self.port = int(os.environ.get("VALKEY_PORT", 6379)) - self.client = None - - def connect(self): - try: - self.client = redis.Redis( - host=self.host, - port=self.port, - decode_responses=True, - health_check_interval=10, - socket_connect_timeout=5, - retry_on_timeout=True, - socket_keepalive=True - ) - self.client.ping() # Test the connection - print("Successfully connected to Valkey") - except RedisError as e: - print(f"Failed to connect to Valkey: {e}") - self.client = None - - def get(self, key): - if not self.client: - self.connect() - try: - return self.client.get(key) - except RedisError as e: - 
print(f"Error getting key from Valkey: {e}") - return None - - def set(self, key, value): - if not self.client: - self.connect() - try: - return self.client.set(key, value) - except RedisError as e: - print(f"Error setting key in Valkey: {e}") - return False - - def delete(self, key): - if not self.client: - self.connect() - try: - return self.client.delete(key) - except RedisError as e: - print(f"Error deleting key from Valkey: {e}") - return False - - def expire(self, key: str, seconds: int) -> bool: - if not self.client: - self.connect() - try: - return bool(self.client.expire(key, seconds)) if self.client else False - except RedisError as e: - print(f"Error setting expiry in Valkey: {e}") - return False - -valkey_client = ValkeyClient() diff --git a/database_functions/websocket_broadcaster.py b/database_functions/websocket_broadcaster.py deleted file mode 100644 index 0e6860f1..00000000 --- a/database_functions/websocket_broadcaster.py +++ /dev/null @@ -1,40 +0,0 @@ -# websocket_broadcaster.py - Simple HTTP-based WebSocket broadcaster -import requests -import logging -import json - -class WebSocketBroadcaster: - def __init__(self): - # Hard-coded to use the internal container port - self.server_url = "http://localhost:8032" - - def broadcast_task_update(self, user_id, task_data, api_key): - """Send task update via HTTP to the broadcast endpoint""" - try: - # Prepare the message - message = { - "event": "update", - "task": task_data - } - - # Send to the broadcast endpoint - response = requests.post( - f"{self.server_url}/api/tasks/broadcast", - json={"user_id": user_id, "message": message}, - headers={"Api-Key": api_key}, - timeout=2 # Short timeout to avoid blocking - ) - - # Check result - if response.status_code == 200: - print(f"Successfully sent update for task {task_data.get('task_id')}") - return True - else: - print(f"Failed to send update: {response.status_code} - {response.text}") - return False - except Exception as e: - print(f"Error sending 
broadcast: {str(e)}") - return False - -# Create a singleton instance -broadcaster = WebSocketBroadcaster() diff --git a/database_functions/youtube.py b/database_functions/youtube.py deleted file mode 100644 index b9d31590..00000000 --- a/database_functions/youtube.py +++ /dev/null @@ -1,275 +0,0 @@ -from typing import Dict, Optional -from yt_dlp import YoutubeDL -from fastapi import HTTPException -import logging -import os -import datetime -from datetime import timedelta -import logging -from bs4 import BeautifulSoup -import time -import random -from database_functions import functions - - -async def get_channel_info(channel_id: str) -> Dict: - """ - Get YouTube channel info using yt-dlp - """ - ydl_opts = { - 'quiet': True, - 'extract_flat': True, - 'no_warnings': True, - 'playlist_items': '0', # Just get channel info, not videos - 'socket_timeout': 30, # 30 second socket timeout - 'timeout': 60, # 60 second overall timeout - } - print('in get channel info') - - try: - with YoutubeDL(ydl_opts) as ydl: - channel_url = f"https://www.youtube.com/channel/{channel_id}" - channel_info = ydl.extract_info( - channel_url, - download=False, - process=False - ) - print(f'get info {channel_info}') - - # Get avatar URL - thumbnail_url = None - if channel_info and channel_info.get('thumbnails'): - avatar_thumbnails = [t for t in channel_info['thumbnails'] - if t.get('id', '').startswith('avatar')] - - if avatar_thumbnails: - thumbnail_url = avatar_thumbnails[-1]['url'] - else: - avatar_thumbnails = [t for t in channel_info['thumbnails'] - if 'avatar' in t.get('url', '').lower()] - if avatar_thumbnails: - thumbnail_url = avatar_thumbnails[-1]['url'] - else: - thumbnail_url = channel_info['thumbnails'][0]['url'] - print('did a bunch of thumbnail stuff') - return { - 'channel_id': channel_id, - 'name': channel_info.get('channel', '') or channel_info.get('title', ''), - 'description': channel_info.get('description', '')[:500] if channel_info.get('description') else '', - 
'thumbnail_url': thumbnail_url, - } - - except Exception as e: - logging.error(f"Error getting channel info: {e}") - raise HTTPException( - status_code=500, - detail=f"Error fetching channel info: {str(e)}" - ) - -def download_youtube_audio(video_id: str, output_path: str): - """Download audio for a YouTube video""" - # Remove .mp3 extension if present to prevent double extension - if output_path.endswith('.mp3'): - base_path = output_path[:-4] - else: - base_path = output_path - - ydl_opts = { - 'format': 'bestaudio/best', - 'postprocessors': [{ - 'key': 'FFmpegExtractAudio', - 'preferredcodec': 'mp3', - }], - 'outtmpl': base_path, - 'ignoreerrors': True, # Add this to not fail on individual errors - 'socket_timeout': 30, # 30 second socket timeout - 'timeout': 60, # 60 second overall timeout - } - with YoutubeDL(ydl_opts) as ydl: - ydl.download([f"https://www.youtube.com/watch?v={video_id}"]) - - -def process_youtube_videos(database_type, podcast_id: int, channel_id: str, cnx, feed_cutoff: int): - """Background task to process videos and download audio""" - - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger(__name__) - - logger.info("="*50) - logger.info(f"Starting YouTube channel processing") - logger.info(f"Podcast ID: {podcast_id}") - logger.info(f"Channel ID: {channel_id}") - logger.info("="*50) - - try: - cutoff_date = datetime.datetime.now(datetime.timezone.utc) - timedelta(days=feed_cutoff) - logger.info(f"Cutoff date set to: {cutoff_date}") - - logger.info("Cleaning up videos older than cutoff date...") - functions.remove_old_youtube_videos(cnx, database_type, podcast_id, cutoff_date) - - ydl_opts = { - 'quiet': True, - 'no_warnings': True, - 'extract_flat': True, # Fast initial fetch - 'ignoreerrors': True, - 'socket_timeout': 30, # 30 second socket timeout - 'timeout': 60, # 60 second overall timeout - } - - logger.info("Initializing YouTube-DL with options:") - logger.info(str(ydl_opts)) - - recent_videos = [] - with 
YoutubeDL(ydl_opts) as ydl: - channel_url = f"https://www.youtube.com/channel/{channel_id}/videos" - logger.info(f"Fetching channel data from: {channel_url}") - - try: - results = ydl.extract_info(channel_url, download=False) - logger.info("Initial channel data fetch successful") - logger.info(f"Raw result keys: {results.keys() if results else 'No results'}") - except Exception as e: - logger.error(f"Failed to fetch channel data: {str(e)}") - raise - - if not results or 'entries' not in results: - logger.error(f"No video list found in results") - logger.error(f"Available keys: {results.keys() if results else 'None'}") - return - - logger.info(f"Found {len(results.get('entries', []))} total videos") - - # Process each video - for entry in results.get('entries', []): - if not entry or not entry.get('id'): - logger.warning(f"Skipping invalid entry: {entry}") - continue - - try: - video_id = entry['id'] - logger.info(f"\nProcessing video ID: {video_id}") - - # Get upload date using BS4 method - published = functions.get_video_date(video_id) - if not published: - logger.warning(f"Could not determine upload date for video {video_id}, skipping") - continue - - logger.info(f"Video publish date: {published}") - - if published <= cutoff_date: - logger.info(f"Video {video_id} from {published} is too old, stopping processing") - break - - video_data = { - 'id': video_id, - 'title': entry['title'], - 'description': entry.get('description', ''), - 'url': f"https://www.youtube.com/watch?v={video_id}", - 'thumbnail': entry.get('thumbnails', [{}])[0].get('url', '') if entry.get('thumbnails') else '', - 'publish_date': published, - 'duration': entry.get('duration', 0) - } - - logger.info("Collected video data:") - logger.info(str(video_data)) - - recent_videos.append(video_data) - logger.info(f"Successfully added video {video_id} to processing queue") - - except Exception as e: - logger.error(f"Error processing video entry:") - logger.error(f"Entry data: {entry}") - 
logger.error(f"Error: {str(e)}") - logger.exception("Full traceback:") - continue - - logger.info(f"\nProcessing complete - Found {len(recent_videos)} recent videos") - - if recent_videos: - logger.info("\nStarting database updates") - try: - # Get existing videos first - existing_videos = functions.get_existing_youtube_videos(cnx, database_type, podcast_id) - - # Filter out videos that already exist - new_videos = [] - for video in recent_videos: - video_url = f"https://www.youtube.com/watch?v={video['id']}" - if video_url not in existing_videos: - new_videos.append(video) - else: - logger.info(f"Video already exists, skipping: {video['title']}") - - if new_videos: - functions.add_youtube_videos(cnx, database_type, podcast_id, new_videos) - logger.info(f"Successfully added {len(new_videos)} new videos") - else: - logger.info("No new videos to add") - except Exception as e: - logger.error("Failed to add videos to database") - logger.error(str(e)) - logger.exception("Full traceback:") - raise - - logger.info("\nStarting audio downloads") - successful_downloads = 0 - failed_downloads = 0 - - for video in recent_videos: - try: - output_path = f"/opt/pinepods/downloads/youtube/{video['id']}.mp3" - output_path_double = f"{output_path}.mp3" - - logger.info(f"\nProcessing download for video: {video['id']}") - logger.info(f"Title: {video['title']}") - logger.info(f"Target path: {output_path}") - - if os.path.exists(output_path) or os.path.exists(output_path_double): - logger.info(f"Audio file already exists, skipping download") - continue - - logger.info("Starting download...") - download_youtube_audio(video['id'], output_path) - logger.info("Download completed successfully") - successful_downloads += 1 - - except Exception as e: - # This is the key fix - properly catch all exceptions and continue - failed_downloads += 1 - - # Check for specific error types to provide better logging - error_msg = str(e) - if "members-only content" in error_msg.lower(): - 
logger.warning(f"Skipping video {video['id']} - Members-only content: {video['title']}") - elif "private" in error_msg.lower(): - logger.warning(f"Skipping video {video['id']} - Private video: {video['title']}") - elif "unavailable" in error_msg.lower(): - logger.warning(f"Skipping video {video['id']} - Unavailable video: {video['title']}") - else: - logger.error(f"Failed to download video {video['id']}: {video['title']}") - logger.error(f"Error: {error_msg}") - - # Always continue to the next video - continue - - logger.info(f"\nDownload summary: {successful_downloads} successful, {failed_downloads} failed") - else: - logger.info("No new videos to process") - - except Exception as e: - logger.error("\nFatal error in process_youtube_channel") - logger.error(str(e)) - logger.exception("Full traceback:") - raise e - finally: - # Use recalculate to ensure accuracy - try: - functions.update_episode_count(cnx, database_type, podcast_id) - except Exception as e: - logger.error(f"Failed to update episode count: {str(e)}") - logger.info("\nCleaning up database connection") - logger.info("="*50) - logger.info("Channel processing complete") - logger.info("="*50) diff --git a/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml b/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml new file mode 100644 index 00000000..c536266c --- /dev/null +++ b/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml @@ -0,0 +1,55 @@ +services: + db: + container_name: db + image: mariadb:12 + command: --wait_timeout=1800 + environment: + MYSQL_TCP_PORT: 3306 + MYSQL_ROOT_PASSWORD: myS3curepass + MYSQL_DATABASE: pinepods_database + MYSQL_COLLATION_SERVER: utf8mb4_unicode_ci + MYSQL_CHARACTER_SET_SERVER: utf8mb4 + MYSQL_INIT_CONNECT: "SET @@GLOBAL.max_allowed_packet=64*1024*1024;" + volumes: + - /home/user/pinepods/sql:/var/lib/mysql + restart: always + + valkey: + image: valkey/valkey:8-alpine + + pinepods: + image: 
madeofpendletonwool/pinepods:latest + ports: + - "8040:8040" + environment: + # Basic Server Info + SEARCH_API_URL: "https://search.pinepods.online/api/search" + PEOPLE_API_URL: "https://people.pinepods.online" + HOSTNAME: "http://localhost:8040" + # Database Vars + DB_TYPE: mariadb + DB_HOST: db + DB_PORT: 3306 + DB_USER: root + DB_PASSWORD: myS3curepass + DB_NAME: pinepods_database + # Valkey Settings + VALKEY_HOST: valkey + VALKEY_PORT: 6379 + # Enable or Disable Debug Mode for additional Printing + DEBUG_MODE: false + PUID: ${UID:-911} + PGID: ${GID:-911} + # Add timezone configuration + TZ: "America/New_York" + + volumes: + # Mount the download and backup locations on the server + - /home/user/pinepods/downloads:/opt/pinepods/downloads + - /home/user/pinepods/backups:/opt/pinepods/backups + # Timezone volumes, HIGHLY optional. Read the timezone notes below + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro + depends_on: + - db + - valkey diff --git a/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml b/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml index 80af5318..3b9d6f19 100644 --- a/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml +++ b/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml @@ -1,6 +1,7 @@ services: db: - image: mariadb:latest + container_name: db + image: mysql:9 command: --wait_timeout=1800 environment: MYSQL_TCP_PORT: 3306 @@ -11,27 +12,20 @@ services: MYSQL_INIT_CONNECT: "SET @@GLOBAL.max_allowed_packet=64*1024*1024;" volumes: - /home/user/pinepods/sql:/var/lib/mysql - ports: - - "3306:3306" restart: always + valkey: image: valkey/valkey:8-alpine - ports: - - "6379:6379" + pinepods: image: madeofpendletonwool/pinepods:latest ports: - # Pinepods Main Port - "8040:8040" environment: # Basic Server Info SEARCH_API_URL: "https://search.pinepods.online/api/search" PEOPLE_API_URL: "https://people.pinepods.online" - # Default Admin User 
Information - USERNAME: myadminuser01 - PASSWORD: myS3curepass - FULLNAME: Pinepods Admin - EMAIL: user@pinepods.online + HOSTNAME: "http://localhost:8040" # Database Vars DB_TYPE: mariadb DB_HOST: db @@ -43,17 +37,24 @@ services: VALKEY_HOST: valkey VALKEY_PORT: 6379 # Enable or Disable Debug Mode for additional Printing - DEBUG_MODE: False + DEBUG_MODE: false PUID: ${UID:-911} PGID: ${GID:-911} # Add timezone configuration TZ: "America/New_York" + # Language Configuration + DEFAULT_LANGUAGE: "en" volumes: # Mount the download and the backup location on the server if you want to. You could mount a nas to the downloads folder or something like that. # The backups directory is used if backups are made on the web version on pinepods. When taking backups on the client version it downloads them locally. + volumes: + # Mount the download and backup locations on the server - /home/user/pinepods/downloads:/opt/pinepods/downloads - /home/user/pinepods/backups:/opt/pinepods/backups + # Timezone volumes, HIGHLY optional. 
Read the timezone notes below + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro depends_on: - db - valkey diff --git a/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml b/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml index bf80ff7b..e9d348a6 100644 --- a/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml +++ b/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml @@ -1,20 +1,20 @@ services: db: - image: postgres:latest + container_name: db + image: postgres:17 environment: - POSTGRES_DB: pypods_database + POSTGRES_DB: pinepods_database POSTGRES_USER: postgres POSTGRES_PASSWORD: myS3curepass PGDATA: /var/lib/postgresql/data/pgdata volumes: - /home/user/pinepods/pgdata:/var/lib/postgresql/data - ports: - - "5432:5432" restart: always + valkey: image: valkey/valkey:8-alpine - ports: - - "6379:6379" + restart: always + pinepods: image: madeofpendletonwool/pinepods:latest ports: @@ -23,11 +23,7 @@ services: # Basic Server Info SEARCH_API_URL: "https://search.pinepods.online/api/search" PEOPLE_API_URL: "https://people.pinepods.online" - # Default Admin User Information - USERNAME: myadminuser01 - PASSWORD: myS3curepass - FULLNAME: Pinepods Admin - EMAIL: user@pinepods.online + HOSTNAME: "http://localhost:8040" # Database Vars DB_TYPE: postgresql DB_HOST: db @@ -36,18 +32,24 @@ services: DB_PASSWORD: myS3curepass DB_NAME: pinepods_database # Valkey Settings - VALKEY_HOST: post-valkey + VALKEY_HOST: valkey VALKEY_PORT: 6379 # Enable or Disable Debug Mode for additional Printing - DEBUG_MODE: False + DEBUG_MODE: false PUID: ${UID:-911} PGID: ${GID:-911} # Add timezone configuration TZ: "America/New_York" + # Language Configuration + DEFAULT_LANGUAGE: "en" volumes: - # Mount the download location on the server if you want to. 
You could mount a NAS to this folder or something similar + # Mount the download and backup locations on the server - /home/user/pinepods/downloads:/opt/pinepods/downloads - /home/user/pinepods/backups:/opt/pinepods/backups + # Timezone volumes, HIGHLY optional. Read the timezone notes below + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro + restart: always depends_on: - db - valkey diff --git a/deployment/kubernetes/helm/pinepods/Chart.yaml b/deployment/kubernetes/helm/pinepods/Chart.yaml index bab54baa..832fd855 100644 --- a/deployment/kubernetes/helm/pinepods/Chart.yaml +++ b/deployment/kubernetes/helm/pinepods/Chart.yaml @@ -10,4 +10,4 @@ dependencies: - name: valkey version: 2.0.1 repository: https://charts.bitnami.com/bitnami - condition: valkey.enabled \ No newline at end of file + condition: valkey.enabled diff --git a/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml b/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml index e57581a7..8d5cfb3d 100644 --- a/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml +++ b/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml @@ -9,4 +9,5 @@ type: Opaque stringData: API_KEY: {{ .Values.backend.secrets.apiKey | quote }} API_SECRET: {{ .Values.backend.secrets.apiSecret | quote }} + YOUTUBE_API_KEY: {{ .Values.backend.secrets.youtubeApiKey | quote }} {{- end }} \ No newline at end of file diff --git a/deployment/kubernetes/helm/pinepods/templates/deployment.yaml b/deployment/kubernetes/helm/pinepods/templates/deployment.yaml index 5ef79057..96b0007b 100644 --- a/deployment/kubernetes/helm/pinepods/templates/deployment.yaml +++ b/deployment/kubernetes/helm/pinepods/templates/deployment.yaml @@ -29,13 +29,16 @@ spec: - secretRef: name: {{ include "pinepods.fullname" $ }}-env env: - {{ if (and (not .Values.postgresql.enabled) (.Values.externalDatabase.existingSecret.enabled)) -}} + {{ if (and (not .Values.postgresql.enabled) 
(.Values.externalDatabase.existingSecret.enabled) (not .Values.externalDatabase.manuallyProvide)) -}} - name: DB_PASSWORD valueFrom: secretKeyRef: name: {{ .Values.externalDatabase.existingSecret.name }} key: {{ .Values.externalDatabase.existingSecret.key }} {{- end }} + {{- with .Values.extraEnv}} + {{- toYaml . | nindent 12 }} + {{- end }} volumeMounts: {{- if .Values.persistence.enabled }} - name: downloads diff --git a/deployment/kubernetes/helm/pinepods/templates/secret.yaml b/deployment/kubernetes/helm/pinepods/templates/secret.yaml index 441c480b..0592e4d4 100644 --- a/deployment/kubernetes/helm/pinepods/templates/secret.yaml +++ b/deployment/kubernetes/helm/pinepods/templates/secret.yaml @@ -8,7 +8,7 @@ {{ $_ := set $env "DB_NAME" "pinepods_database" }} {{ $_ := set $env "DB_USER" "postgres" }} {{ $_ := set $env "DB_PASSWORD" .Values.postgresql.auth.password }} -{{ else }} +{{ else if not .Values.externalDatabase.manuallyProvide }} {{ $_ := set $env "DB_TYPE" .Values.externalDatabase.type }} {{ $_ := set $env "DB_HOST" .Values.externalDatabase.host }} {{ $_ := set $env "DB_PORT" .Values.externalDatabase.port }} diff --git a/deployment/kubernetes/helm/pinepods/values.yaml b/deployment/kubernetes/helm/pinepods/values.yaml index 81076a17..5a8ddbf3 100644 --- a/deployment/kubernetes/helm/pinepods/values.yaml +++ b/deployment/kubernetes/helm/pinepods/values.yaml @@ -80,6 +80,8 @@ postgresql: # -- Enable PostgreSQL deployment # Set to false if using external database enabled: true + image: + repository: bitnamilegacy/postgresql auth: # -- PostgreSQL username username: postgres @@ -130,6 +132,8 @@ externalDatabase: enabled: false name: existing-secret key: password + # -- manually provide the DB environment variables under extraEnv. 
Useful for DB solutions like CNPG + manuallyProvide: false resources: {} @@ -137,6 +141,8 @@ resources: {} valkey: # -- Enable Valkey deployment enabled: true + image: + repository: bitnamilegacy/valkey architecture: standalone # This prevents replica creation auth: enabled: false @@ -178,6 +184,16 @@ env: # Set to true for additional logging DEBUG_MODE: "false" +## Allows you to specify additional environment values directly on the application pod. Useful for specific configmap or secret references. +extraEnv: [] +# - name: CUSTOM_VARIABLE +# value: "true" +# - name: DB_HOST +# valueFrom: +# secretKeyRef: +# name: pinepods-db-app +# key: host + ## Pod Security Context securityContext: {} # fsGroup: 2000 @@ -219,6 +235,7 @@ backend: secrets: apiKey: "MYPODCASTINDEXKEY" apiSecret: "MYPODCASTINDEXSECRET" + youtubeApiKey: "YOUR_YOUTUBE_API_KEY_HERE" ingress: enabled: true className: "" @@ -263,4 +280,4 @@ podpeople: paths: - path: / pathType: Prefix - tls: [] \ No newline at end of file + tls: [] diff --git a/dockerfile b/dockerfile index d2eadb2e..e5a2e39e 100644 --- a/dockerfile +++ b/dockerfile @@ -38,14 +38,14 @@ COPY ./gpodder-api/config ./config COPY ./gpodder-api/internal ./internal # Build the application -RUN CGO_ENABLED=0 GOOS=linux go build -o gpodder-api ./cmd/server/ +RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o gpodder-api ./cmd/server/ # Python builder stage for database setup FROM python:3.11-alpine AS python-builder WORKDIR /build -# Install build dependencies for PyInstaller -RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev +# Install build dependencies for PyInstaller and MariaDB connector +RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev mariadb-connector-c-dev # Copy Python source files COPY ./database_functions ./database_functions @@ -90,24 +90,35 @@ ENV OPENSSL_LIB_DIR=/usr/lib ENV OPENSSL_INCLUDE_DIR=/usr/include # Build the Rust API -RUN cargo build --release +RUN cargo build --release && strip 
target/release/pinepods-api # Final stage for setting up runtime environment FROM alpine # Metadata LABEL maintainer="Collin Pendleton " -# Install runtime dependencies (removed python3, py3-pip, cronie, and openrc) -RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg supervisor wget jq +# Install runtime dependencies +RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg wget jq mariadb-connector-c-dev -# Download and install latest yt-dlp binary + +# Download and install latest yt-dlp binary (musllinux for Alpine) RUN LATEST_VERSION=$(curl -s https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest | jq -r .tag_name) && \ - wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_linux" && \ + wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_musllinux" && \ chmod +x /usr/local/bin/yt-dlp + +# Download and install Horust (x86_64) +RUN wget -O /tmp/horust.tar.gz "https://github.com/FedericoPonzi/Horust/releases/download/v0.1.11/horust-x86_64-unknown-linux-musl.tar.gz" && \ + cd /tmp && tar -xzf horust.tar.gz && \ + mv horust /usr/local/bin/ && \ + chmod +x /usr/local/bin/horust && \ + rm -f /tmp/horust.tar.gz + ENV TZ=UTC # Copy compiled database setup binary (replaces Python dependency) COPY --from=python-builder /build/dist/pinepods-db-setup /usr/local/bin/ # Copy built files from the builder stage to the Nginx serving directory COPY --from=builder /app/dist /var/www/html/ +# Copy translation files for the Rust API to access +COPY ./web/src/translations /var/www/html/static/translations # Copy Go API binary from the go-builder stage COPY --from=go-builder /gpodder-api/gpodder-api /usr/local/bin/ # Copy Rust API binary from the rust-api-builder stage @@ -119,7 +130,7 @@ COPY startup/startup.sh /startup.sh RUN chmod +x /startup.sh # Copy Pinepods runtime files RUN mkdir -p 
/pinepods -RUN mkdir -p /var/log/supervisor/ +RUN mkdir -p /var/log/pinepods/ && mkdir -p /etc/horust/services/ COPY startup/ /pinepods/startup/ # Legacy cron scripts removed - background tasks now handled by internal Rust scheduler COPY clients/ /pinepods/clients/ diff --git a/dockerfile-arm b/dockerfile-arm index cf42d836..fdf61547 100644 --- a/dockerfile-arm +++ b/dockerfile-arm @@ -3,9 +3,6 @@ FROM rust:alpine AS builder # Install build dependencies RUN apk update && apk upgrade && \ apk add --no-cache musl-dev libffi-dev zlib-dev jpeg-dev -# Install wasm target and build tools -RUN rustup target add wasm32-unknown-unknown && \ - cargo install wasm-bindgen-cli RUN apk update && apk upgrade # Add the Edge Community repository RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories @@ -13,25 +10,69 @@ RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk RUN apk update # Install the desired package from the edge community repository RUN apk add trunk@edge +# Install wasm target and build tools +RUN rustup target add wasm32-unknown-unknown && \ + cargo install wasm-bindgen-cli && \ + cargo install horust --locked + +# Test wasm-bindgen installation before full build +RUN echo "Testing wasm-bindgen installation..." && \ + which wasm-bindgen && \ + wasm-bindgen --version && \ + ls -la /usr/local/cargo/bin/ && \ + echo "wasm-bindgen test completed" + +# Test trunk installation +RUN echo "Testing trunk installation..." 
&& \ + which trunk && \ + trunk --version && \ + echo "trunk test completed" + # Add application files to the builder stage COPY ./web/Cargo.lock ./web/Cargo.toml ./web/dev-info.md ./web/index.html ./web/tailwind.config.js ./web/Trunk.toml /app/ COPY ./web/src /app/src COPY ./web/static /app/static WORKDIR /app -# Initialize trunk first -RUN cd /tmp && \ - echo "" > dummy.rs && \ - trunk build dummy.rs || true -RUN cargo install --locked wasm-bindgen-cli -# Build the Yew application in release mode -RUN RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build --release + +# Test that trunk can find wasm-bindgen before full build +RUN echo "Testing if trunk can find wasm-bindgen..." && \ + RUST_LOG=debug trunk build --help && \ + echo "trunk can find wasm-bindgen" +# Auto-detect wasm-bindgen version and replace trunk's glibc binary with our musl one +RUN WASM_BINDGEN_VERSION=$(grep -A1 "name = \"wasm-bindgen\"" /app/Cargo.lock | grep "version = " | cut -d'"' -f2) && \ + echo "Detected wasm-bindgen version: $WASM_BINDGEN_VERSION" && \ + RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" timeout 30 trunk build --features server_build --release || \ + (echo "Build failed as expected, replacing downloaded binary..." && \ + mkdir -p /root/.cache/trunk/wasm-bindgen-$WASM_BINDGEN_VERSION && \ + cp /usr/local/cargo/bin/wasm-bindgen /root/.cache/trunk/wasm-bindgen-$WASM_BINDGEN_VERSION/ && \ + echo "Retrying build with musl binary..." 
&& \ + RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build --release) + +# Go builder stage for the gpodder API +FROM golang:alpine AS go-builder +WORKDIR /gpodder-api + +# Install build dependencies +RUN apk add --no-cache git + +# Copy go module files first for better layer caching +COPY ./gpodder-api/go.mod ./gpodder-api/go.sum ./ +RUN go mod download + +# Copy the rest of the source code +COPY ./gpodder-api/cmd ./cmd +COPY ./gpodder-api/config ./config +COPY ./gpodder-api/internal ./internal + +# Build the application +RUN CGO_ENABLED=0 GOOS=linux go build -o gpodder-api ./cmd/server/ # Python builder stage for database setup FROM python:3.11-alpine AS python-builder WORKDIR /build -# Install build dependencies for PyInstaller -RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev +# Install build dependencies for PyInstaller and MariaDB connector +RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev mariadb-connector-c-dev # Copy Python source files COPY ./database_functions ./database_functions @@ -59,64 +100,83 @@ RUN pyinstaller --onefile \ --console \ startup/setup_database_new.py +# Rust API builder stage +FROM rust:alpine AS rust-api-builder +WORKDIR /rust-api + +# Install build dependencies +RUN apk add --no-cache musl-dev pkgconfig openssl-dev openssl-libs-static + +# Copy Rust API files +COPY ./rust-api/Cargo.toml ./rust-api/Cargo.lock ./ +COPY ./rust-api/src ./src + +# Set environment for static linking +ENV OPENSSL_STATIC=1 +ENV OPENSSL_LIB_DIR=/usr/lib +ENV OPENSSL_INCLUDE_DIR=/usr/include + +# Build the Rust API +RUN cargo build --release + # Final stage for setting up runtime environment FROM alpine # Metadata LABEL maintainer="Collin Pendleton " -# Install runtime dependencies (removed Python) -RUN apk update && apk upgrade && \ - apk add --no-cache nginx tzdata openssl bash mariadb-client curl ffmpeg supervisor wget jq && \ - rm -rf /var/cache/apk/* +# Install runtime 
dependencies +RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg wget jq mariadb-connector-c-dev -# Download and install latest yt-dlp binary for ARM64 + +# Download and install latest yt-dlp binary for ARM64 (musllinux for Alpine) RUN LATEST_VERSION=$(curl -s https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest | jq -r .tag_name) && \ - wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_linux_aarch64" && \ + wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_musllinux_aarch64" && \ chmod +x /usr/local/bin/yt-dlp -ENV TZ=UTC -# Set environment variables -ENV APP_ROOT=/pinepods +# Copy Horust binary from builder stage +COPY --from=builder /usr/local/cargo/bin/horust /usr/local/bin/ +ENV TZ=UTC # Copy compiled database setup binary (replaces Python dependency) COPY --from=python-builder /build/dist/pinepods-db-setup /usr/local/bin/ - -# Copy wait-for-it script -COPY wait-for-it/wait-for-it.sh /wait-for-it.sh -RUN chmod +x /wait-for-it.sh - -# Copy built files from the builder stage +# Copy built files from the builder stage to the Nginx serving directory COPY --from=builder /app/dist /var/www/html/ - -# Set up application directories and files +# Copy translation files for the Rust API to access +COPY ./web/src/translations /var/www/html/static/translations +# Copy Go API binary from the go-builder stage +COPY --from=go-builder /gpodder-api/gpodder-api /usr/local/bin/ +# Copy Rust API binary from the rust-api-builder stage +COPY --from=rust-api-builder /rust-api/target/release/pinepods-api /usr/local/bin/ +# Move to the root directory to execute the startup script WORKDIR / +# Copy startup scripts COPY startup/startup.sh /startup.sh RUN chmod +x /startup.sh - -RUN mkdir -p /pinepods /var/log/supervisor/ +# Copy Pinepods runtime files +RUN mkdir -p /pinepods +RUN mkdir -p /var/log/pinepods/ && mkdir -p 
/etc/horust/services/ COPY startup/ /pinepods/startup/ +# Legacy cron scripts removed - background tasks now handled by internal Rust scheduler COPY clients/ /pinepods/clients/ COPY database_functions/ /pinepods/database_functions/ -# Legacy cron scripts removed - background tasks now handled by internal Rust scheduler RUN chmod +x /pinepods/startup/startup.sh - -# Clean things up -RUN rm -rf \ - /var/cache/apk/* \ - /root/.cache \ - /tmp/* \ - /var/tmp/* \ - /usr/share/man \ - /usr/share/doc - -# Define and set the version +ENV APP_ROOT=/pinepods +# Define the build argument ARG PINEPODS_VERSION +# Write the Pinepods version to the current_version file RUN echo "${PINEPODS_VERSION}" > /pinepods/current_version +# Configure Nginx +COPY startup/nginx.conf /etc/nginx/nginx.conf + +# Copy script to start gpodder API +COPY ./gpodder-api/start-gpodder.sh /usr/local/bin/ +RUN chmod +x /usr/local/bin/start-gpodder.sh RUN cp /usr/share/zoneinfo/UTC /etc/localtime && \ echo "UTC" > /etc/timezone -# Configure Nginx -COPY startup/nginx.conf /etc/nginx/nginx.conf +# Expose ports +EXPOSE 8080 8000 +# Start everything using the startup script ENTRYPOINT ["bash", "/startup.sh"] diff --git a/docs/index.yaml b/docs/index.yaml index 60af1f82..6a118d21 100644 --- a/docs/index.yaml +++ b/docs/index.yaml @@ -2,7 +2,45 @@ apiVersion: v1 entries: pinepods: - apiVersion: v2 - created: "2025-05-14T12:45:57.821873003Z" + created: "2025-11-04T13:32:05.132884772Z" + dependencies: + - condition: postgresql.enabled + name: postgresql + repository: https://charts.bitnami.com/bitnami + version: 15.5.14 + - condition: valkey.enabled + name: valkey + repository: https://charts.bitnami.com/bitnami + version: 2.0.1 + description: A Helm chart for deploying Pinepods - A complete podcast management + system and allows you to play, download, and keep track of podcasts you enjoy. + All self hosted and enjoyed on your own server! 
+ digest: dd25bb9bde17df0b5d3fbb8a56aa23fe358734577870f8801d55123ef6a80570 + name: pinepods + urls: + - https://helm.pinepods.online/pinepods-0.8.2.tgz + version: 0.8.2 + - apiVersion: v2 + created: "2025-11-04T13:32:05.123523925Z" + dependencies: + - condition: postgresql.enabled + name: postgresql + repository: https://charts.bitnami.com/bitnami + version: 15.5.14 + - condition: valkey.enabled + name: valkey + repository: https://charts.bitnami.com/bitnami + version: 2.0.1 + description: A Helm chart for deploying Pinepods - A complete podcast management + system and allows you to play, download, and keep track of podcasts you enjoy. + All self hosted and enjoyed on your own server! + digest: 28e32586ecbdfc1749890007055c61add7b78076cee90980d425113b38b13b9c + name: pinepods + urls: + - https://helm.pinepods.online/pinepods-0.8.1.tgz + version: 0.8.1 + - apiVersion: v2 + created: "2025-11-04T13:32:05.114082893Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -21,7 +59,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.8.tgz version: 0.7.8 - apiVersion: v2 - created: "2025-05-14T12:45:57.81219332Z" + created: "2025-11-04T13:32:05.104601718Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -40,7 +78,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.7.tgz version: 0.7.7 - apiVersion: v2 - created: "2025-05-14T12:45:57.802612391Z" + created: "2025-11-04T13:32:05.095243553Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -59,7 +97,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.6.tgz version: 0.7.6 - apiVersion: v2 - created: "2025-05-14T12:45:57.793019863Z" + created: "2025-11-04T13:32:05.08593408Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -78,7 +116,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.5.tgz version: 0.7.5 - apiVersion: v2 - created: "2025-05-14T12:45:57.783514329Z" + created: "2025-11-04T13:32:05.076444979Z" dependencies: - condition: 
postgresql.enabled name: postgresql @@ -97,7 +135,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.4.tgz version: 0.7.4 - apiVersion: v2 - created: "2025-05-14T12:45:57.77388794Z" + created: "2025-11-04T13:32:05.067177875Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -116,7 +154,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.3.tgz version: 0.7.3 - apiVersion: v2 - created: "2025-05-14T12:45:57.764225682Z" + created: "2025-11-04T13:32:05.057181838Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -135,7 +173,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.2.tgz version: 0.7.2 - apiVersion: v2 - created: "2025-05-14T12:45:57.754821944Z" + created: "2025-11-04T13:32:05.04708939Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -154,7 +192,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.1.tgz version: 0.7.1 - apiVersion: v2 - created: "2025-05-14T12:45:57.744826392Z" + created: "2025-11-04T13:32:05.037629207Z" dependencies: - condition: postgresql.enabled name: postgresql @@ -173,7 +211,7 @@ entries: - https://helm.pinepods.online/pinepods-0.7.0.tgz version: 0.7.0 - apiVersion: v2 - created: "2025-05-14T12:45:57.734393365Z" + created: "2025-11-04T13:32:05.028436822Z" dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami @@ -187,7 +225,7 @@ entries: - https://helm.pinepods.online/pinepods-0.6.6.tgz version: 0.6.6 - apiVersion: v2 - created: "2025-05-14T12:45:57.730412407Z" + created: "2025-11-04T13:32:05.023937037Z" dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami @@ -201,7 +239,7 @@ entries: - https://helm.pinepods.online/pinepods-0.6.5.tgz version: 0.6.5 - apiVersion: v2 - created: "2025-05-14T12:45:57.725514296Z" + created: "2025-11-04T13:32:05.020029522Z" dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami @@ -215,7 +253,7 @@ entries: - https://helm.pinepods.online/pinepods-0.6.4.tgz 
version: 0.6.4 - apiVersion: v2 - created: "2025-05-14T12:45:57.72084837Z" + created: "2025-11-04T13:32:05.015236734Z" dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami @@ -229,7 +267,7 @@ entries: - https://helm.pinepods.online/pinepods-0.6.3.tgz version: 0.6.3 - apiVersion: v2 - created: "2025-05-14T12:45:57.716786601Z" + created: "2025-11-04T13:32:05.010471203Z" dependencies: - name: postgresql repository: https://charts.bitnami.com/bitnami @@ -242,4 +280,4 @@ entries: urls: - https://helm.pinepods.online/pinepods-0.6.2.tgz version: 0.6.2 -generated: "2025-05-14T12:45:57.711649484Z" +generated: "2025-11-04T13:32:05.006291589Z" diff --git a/docs/pinepods-0.8.1.tgz b/docs/pinepods-0.8.1.tgz new file mode 100644 index 00000000..81b0d6ba Binary files /dev/null and b/docs/pinepods-0.8.1.tgz differ diff --git a/docs/pinepods-0.8.2.tgz b/docs/pinepods-0.8.2.tgz new file mode 100644 index 00000000..deb75083 Binary files /dev/null and b/docs/pinepods-0.8.2.tgz differ diff --git a/fastlane/metadata/android/en-US/full_description.txt b/fastlane/metadata/android/en-US/full_description.txt index 1084350b..80dc00a6 100644 --- a/fastlane/metadata/android/en-US/full_description.txt +++ b/fastlane/metadata/android/en-US/full_description.txt @@ -1,6 +1,7 @@ PinePods is a complete podcast management solution that allows you to host your own podcast server and enjoy a beautiful mobile experience. Features: + • Self-hosted podcast server synchronization • Beautiful, intuitive mobile interface • Download episodes for offline listening @@ -14,4 +15,4 @@ Features: PinePods gives you complete control over your podcast experience while providing the convenience of modern podcast apps. Perfect for users who want privacy, control, and a great listening experience. -Note: This app requires a PinePods server to be set up. Visit the PinePods GitHub repository for server installation instructions. 
\ No newline at end of file +Note: This app requires a PinePods server to be set up. Visit the PinePods GitHub repository for server installation instructions. diff --git a/fastlane/metadata/android/en-US/images/featureGraphic.png b/fastlane/metadata/android/en-US/images/featureGraphic.png new file mode 100644 index 00000000..e7aab93a Binary files /dev/null and b/fastlane/metadata/android/en-US/images/featureGraphic.png differ diff --git a/fastlane/metadata/android/en-US/images/icon.png b/fastlane/metadata/android/en-US/images/icon.png new file mode 100644 index 00000000..4fe781cf Binary files /dev/null and b/fastlane/metadata/android/en-US/images/icon.png differ diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png new file mode 100644 index 00000000..a5c21ee7 Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png differ diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png new file mode 100644 index 00000000..668c4072 Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png differ diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png new file mode 100644 index 00000000..6182ba2b Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png differ diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png new file mode 100644 index 00000000..1d45d009 Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png differ diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png new file mode 100644 index 00000000..4c004426 Binary 
files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png differ diff --git a/gpodder-api/internal/api/auth.go b/gpodder-api/internal/api/auth.go index 651ca9bf..93bffa12 100644 --- a/gpodder-api/internal/api/auth.go +++ b/gpodder-api/internal/api/auth.go @@ -30,7 +30,6 @@ type argon2Params struct { // AuthMiddleware creates a middleware for authentication func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc { return func(c *gin.Context) { - log.Printf("[DEBUG] AuthMiddleware processing request: %s %s", c.Request.Method, c.Request.URL.Path) // Get the username from the URL parameters username := c.Param("username") @@ -41,12 +40,9 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc { return } - log.Printf("[DEBUG] AuthMiddleware: Processing request for username: %s", username) - // Check if this is an internal API call via X-GPodder-Token gpodderTokenHeader := c.GetHeader("X-GPodder-Token") if gpodderTokenHeader != "" { - log.Printf("[DEBUG] AuthMiddleware: Found X-GPodder-Token header") // Get user data var userID int @@ -76,7 +72,6 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc { // For internal calls with X-GPodder-Token header, validate token directly if gpodderToken.Valid && gpodderToken.String == gpodderTokenHeader { - log.Printf("[DEBUG] AuthMiddleware: X-GPodder-Token validated for user: %s", username) c.Set("userID", userID) c.Set("username", username) c.Next() @@ -176,13 +171,11 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc { // Check if this is a gpodder token authentication // Check if this is a gpodder token authentication if gpodderToken.Valid && (gpodderToken.String == password || gpodderToken.String == gpodderTokenHeader) { - log.Printf("[DEBUG] AuthMiddleware: User authenticated with gpodder token: %s", username) authenticated = true } // If token auth didn't succeed, try password authentication if !authenticated && verifyPassword(password, hashedPassword) { - log.Printf("[DEBUG] 
AuthMiddleware: User authenticated with password: %s", username) authenticated = true } @@ -589,18 +582,34 @@ func AuthenticationMiddleware(database *db.Database) gin.HandlerFunc { log.Printf("[DEBUG] AuthenticationMiddleware processing request: %s %s", c.Request.Method, c.Request.URL.Path) - if strings.Contains(c.Request.URL.Path, "/episodes/") && strings.HasSuffix(c.Request.URL.Path, ".json") { - // Extract username from URL path for episode actions + // Handle GPodder API standard .json suffix patterns + if strings.HasSuffix(c.Request.URL.Path, ".json") { parts := strings.Split(c.Request.URL.Path, "/") - if len(parts) >= 3 { - // The path format is /episodes/username.json + var username string + + // Handle /episodes/username.json pattern + if strings.Contains(c.Request.URL.Path, "/episodes/") && len(parts) >= 3 { usernameWithExt := parts[len(parts)-1] - // Remove .json extension - username := strings.TrimSuffix(usernameWithExt, ".json") - // Set it as the username parameter - c.Params = append(c.Params, gin.Param{Key: "username", Value: username}) + username = strings.TrimSuffix(usernameWithExt, ".json") log.Printf("[DEBUG] AuthenticationMiddleware: Extracted username '%s' from episode actions URL", username) } + + // Handle /devices/username.json pattern + if strings.Contains(c.Request.URL.Path, "/devices/") { + for i, part := range parts { + if part == "devices" && i+1 < len(parts) { + usernameWithExt := parts[i+1] + username = strings.TrimSuffix(usernameWithExt, ".json") + log.Printf("[DEBUG] AuthenticationMiddleware: Extracted username '%s' from devices URL", username) + break + } + } + } + + // Set username parameter if extracted + if username != "" { + c.Params = append(c.Params, gin.Param{Key: "username", Value: username}) + } } // First try session auth diff --git a/gpodder-api/internal/api/device.go b/gpodder-api/internal/api/device.go index 3fb75cb2..88502485 100644 --- a/gpodder-api/internal/api/device.go +++ b/gpodder-api/internal/api/device.go @@ 
-154,8 +154,6 @@ func updateDeviceData(database *db.Database) gin.HandlerFunc { return } - log.Printf("[DEBUG] All URL parameters: %v", c.Params) - // Get device name from URL with fix for .json suffix deviceName := c.Param("deviceid") // Also try alternative parameter name if needed diff --git a/gpodder-api/internal/api/episode.go b/gpodder-api/internal/api/episode.go index cf22106f..9f0c45f0 100644 --- a/gpodder-api/internal/api/episode.go +++ b/gpodder-api/internal/api/episode.go @@ -18,7 +18,6 @@ import ( // getEpisodeActions handles GET /api/2/episodes/{username}.json func getEpisodeActions(database *db.Database) gin.HandlerFunc { return func(c *gin.Context) { - log.Printf("[DEBUG] getEpisodeActions handling request: %s %s", c.Request.Method, c.Request.URL.Path) // Get user ID from middleware userID, exists := c.Get("userID") @@ -34,9 +33,6 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { deviceName := c.Query("device") aggregated := c.Query("aggregated") == "true" - log.Printf("[DEBUG] getEpisodeActions: Parameters - since=%s, podcast=%s, device=%s, aggregated=%v", - sinceStr, podcastURL, deviceName, aggregated) - // Get device ID if provided var deviceID *int if deviceName != "" { @@ -105,7 +101,14 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { latestTimestamp = time.Now().Unix() // Fallback to current time } - // Build query based on parameters + // Performance optimization: Add limits and optimize query structure + const MAX_EPISODE_ACTIONS = 25000 // Limit raised to 25k to handle power users while preventing DoS + + // Log query performance info + log.Printf("[DEBUG] getEpisodeActions: Query for user %v with since=%d, device=%s, aggregated=%v", + userID, since, deviceName, aggregated) + + // Build query based on parameters with performance optimizations var queryParts []string if database.IsPostgreSQLDB() { @@ -113,7 +116,7 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { "SELECT " + "e.ActionID, 
e.UserID, e.DeviceID, e.PodcastURL, e.EpisodeURL, " + "e.Action, e.Timestamp, e.Started, e.Position, e.Total, " + - "d.DeviceName " + + "COALESCE(d.DeviceName, '') as DeviceName " + "FROM \"GpodderSyncEpisodeActions\" e " + "LEFT JOIN \"GpodderDevices\" d ON e.DeviceID = d.DeviceID " + "WHERE e.UserID = $1", @@ -123,7 +126,7 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { "SELECT " + "e.ActionID, e.UserID, e.DeviceID, e.PodcastURL, e.EpisodeURL, " + "e.Action, e.Timestamp, e.Started, e.Position, e.Total, " + - "d.DeviceName " + + "COALESCE(d.DeviceName, '') as DeviceName " + "FROM GpodderSyncEpisodeActions e " + "LEFT JOIN GpodderDevices d ON e.DeviceID = d.DeviceID " + "WHERE e.UserID = ?", @@ -182,8 +185,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { e.Timestamp = la.max_timestamp LEFT JOIN "GpodderDevices" d ON e.DeviceID = d.DeviceID WHERE e.UserID = $1 - ORDER BY e.Timestamp DESC - `, conditionsStr) + ORDER BY e.Timestamp ASC + LIMIT %d + `, conditionsStr, MAX_EPISODE_ACTIONS) } else { // For MySQL, we need to use ? placeholders and rebuild the argument list args = []interface{}{userID} // Reset args to just include userID for now @@ -235,8 +239,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { e.Timestamp = la.max_timestamp LEFT JOIN GpodderDevices d ON e.DeviceID = d.DeviceID WHERE e.UserID = ? 
- ORDER BY e.Timestamp DESC - `, conditionsStr) + ORDER BY e.Timestamp ASC + LIMIT %d + `, conditionsStr, MAX_EPISODE_ACTIONS) } } else { // Simple query with ORDER BY @@ -275,18 +280,34 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { } } + // ORDER BY DESC (newest first) to prioritize recent actions + // This ensures recent play state is synced first, even if total actions > limit queryParts = append(queryParts, "ORDER BY e.Timestamp DESC") + + // Add LIMIT for performance - prevents returning massive datasets + // Clients should use the 'since' parameter to paginate through results + if database.IsPostgreSQLDB() { + queryParts = append(queryParts, fmt.Sprintf("LIMIT %d", MAX_EPISODE_ACTIONS)) + } else { + queryParts = append(queryParts, fmt.Sprintf("LIMIT %d", MAX_EPISODE_ACTIONS)) + } + query = strings.Join(queryParts, " ") } - // Execute query + // Execute query with timing + startTime := time.Now() rows, err := database.Query(query, args...) + queryDuration := time.Since(startTime) + if err != nil { - log.Printf("[ERROR] getEpisodeActions: Error querying episode actions: %v", err) + log.Printf("[ERROR] getEpisodeActions: Error querying episode actions (took %v): %v", queryDuration, err) c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get episode actions"}) return } defer rows.Close() + + log.Printf("[DEBUG] getEpisodeActions: Query executed in %v", queryDuration) // Build response actions := make([]models.EpisodeAction, 0) @@ -343,8 +364,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { // Continue with what we've got so far } - log.Printf("[DEBUG] getEpisodeActions: Returning %d actions with timestamp %d", - len(actions), latestTimestamp) + // Log performance results + totalDuration := time.Since(startTime) + log.Printf("[DEBUG] getEpisodeActions: Returning %d actions, total time: %v", len(actions), totalDuration) // Return response in gpodder format c.JSON(http.StatusOK, models.EpisodeActionsResponse{ @@ 
-357,7 +379,6 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc { // uploadEpisodeActions handles POST /api/2/episodes/{username}.json func uploadEpisodeActions(database *db.Database) gin.HandlerFunc { return func(c *gin.Context) { - log.Printf("[DEBUG] uploadEpisodeActions handling request: %s %s", c.Request.Method, c.Request.URL.Path) // Get user ID from middleware userID, exists := c.Get("userID") @@ -384,8 +405,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc { actions = wrappedActions.Actions } - log.Printf("[DEBUG] uploadEpisodeActions: Received %d actions to process", len(actions)) - // Begin transaction tx, err := database.Begin() if err != nil { @@ -447,7 +466,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc { if err != nil { if err == sql.ErrNoRows { // Create the device if it doesn't exist - log.Printf("[DEBUG] uploadEpisodeActions: Creating new device: %s", action.Device) if database.IsPostgreSQLDB() { query = ` @@ -527,8 +545,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc { if parsedTime, err := time.Parse(format, t); err == nil { actionTimestamp = parsedTime.Unix() parsed = true - log.Printf("[DEBUG] uploadEpisodeActions: Parsed timestamp '%s' with format '%s' to Unix timestamp %d", - t, format, actionTimestamp) break } } @@ -673,8 +689,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc { return } - log.Printf("[DEBUG] uploadEpisodeActions: Successfully processed %d actions", len(actions)) - // Return response c.JSON(http.StatusOK, models.EpisodeActionResponse{ Timestamp: timestamp, diff --git a/gpodder-api/internal/api/subscriptions.go b/gpodder-api/internal/api/subscriptions.go index 980c9566..506b9dda 100644 --- a/gpodder-api/internal/api/subscriptions.go +++ b/gpodder-api/internal/api/subscriptions.go @@ -22,6 +22,9 @@ import ( // Maximum number of subscriptions per user const MAX_SUBSCRIPTIONS = 5000 +// Limits for subscription sync to prevent 
overwhelming responses +const MAX_SUBSCRIPTION_CHANGES = 5000 // Reasonable limit for subscription changes per sync + // sanitizeURL cleans and validates a URL func sanitizeURL(rawURL string) (string, error) { // Trim leading/trailing whitespace @@ -63,14 +66,16 @@ func sanitizeURL(rawURL string) (string, error) { // getSubscriptions handles GET /api/2/subscriptions/{username}/{deviceid} func getSubscriptions(database *db.Database) gin.HandlerFunc { return func(c *gin.Context) { - log.Printf("[DEBUG] getSubscriptions handling request: %s %s", c.Request.Method, c.Request.URL.Path) + log.Printf("[DEBUG] getSubscriptions: Starting request processing - %s %s", c.Request.Method, c.Request.URL.Path) // Get user ID from middleware userID, exists := c.Get("userID") if !exists { + log.Printf("[ERROR] getSubscriptions: userID not found in context") c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"}) return } + log.Printf("[DEBUG] getSubscriptions: userID found: %v", userID) // Get device ID from URL - with fix for .json suffix deviceName := c.Param("deviceid") @@ -185,8 +190,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } defer rows.Close() - // Build subscription list - var podcasts []string + // Build subscription list - ensure never nil + podcasts := make([]string, 0) for rows.Next() { var url string if err := rows.Scan(&url); err != nil { @@ -225,6 +230,11 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } // Return subscriptions in gpodder format, ensuring backward compatibility + // CRITICAL FIX for issue #636: Ensure arrays are never nil - AntennaPod requires arrays, not null + if podcasts == nil { + podcasts = []string{} + } + response := gin.H{ "add": podcasts, "remove": []string{}, @@ -238,45 +248,37 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } // Process actual changes since the timestamp - // Query subscriptions added since the given timestamp + // Query subscriptions added since the given 
timestamp - simplified for performance var addRows *sql.Rows if database.IsPostgreSQLDB() { query = ` - SELECT DISTINCT s.PodcastURL - FROM "GpodderSyncSubscriptions" s - WHERE s.UserID = $1 - AND s.DeviceID != $2 - AND s.Timestamp > $3 - AND s.Action = 'add' - AND NOT EXISTS ( - SELECT 1 FROM "GpodderSyncSubscriptions" s2 - WHERE s2.UserID = s.UserID - AND s2.PodcastURL = s.PodcastURL - AND s2.DeviceID = $2 - AND s2.Timestamp > s.Timestamp - AND s2.Action = 'add' - ) + SELECT s.PodcastURL + FROM "GpodderSyncSubscriptions" s + WHERE s.UserID = $1 + AND s.DeviceID != $2 + AND s.Timestamp > $3 + AND s.Action = 'add' + GROUP BY s.PodcastURL + ORDER BY MAX(s.Timestamp) DESC + LIMIT $4 ` - addRows, err = database.Query(query, userID, deviceID, since) + log.Printf("[DEBUG] getSubscriptions: Executing add query with limit %d", MAX_SUBSCRIPTION_CHANGES) + addRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES) } else { query = ` - SELECT DISTINCT s.PodcastURL - FROM GpodderSyncSubscriptions s - WHERE s.UserID = ? - AND s.DeviceID != ? - AND s.Timestamp > ? - AND s.Action = 'add' - AND NOT EXISTS ( - SELECT 1 FROM GpodderSyncSubscriptions s2 - WHERE s2.UserID = s.UserID - AND s2.PodcastURL = s.PodcastURL - AND s2.DeviceID = ? - AND s2.Timestamp > s.Timestamp - AND s2.Action = 'add' - ) + SELECT s.PodcastURL + FROM GpodderSyncSubscriptions s + WHERE s.UserID = ? + AND s.DeviceID != ? + AND s.Timestamp > ? + AND s.Action = 'add' + GROUP BY s.PodcastURL + ORDER BY MAX(s.Timestamp) DESC + LIMIT ? 
` - addRows, err = database.Query(query, userID, deviceID, since, deviceID) + log.Printf("[DEBUG] getSubscriptions: Executing add query with limit %d", MAX_SUBSCRIPTION_CHANGES) + addRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES) } if err != nil { @@ -286,7 +288,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } defer addRows.Close() - addList := []string{} + // Ensure addList is never nil + addList := make([]string, 0) for addRows.Next() { var url string if err := addRows.Scan(&url); err != nil { @@ -296,45 +299,35 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { addList = append(addList, url) } - // Query subscriptions removed since the given timestamp + // Query subscriptions removed since the given timestamp - simplified for performance var removeRows *sql.Rows if database.IsPostgreSQLDB() { query = ` - SELECT DISTINCT s.PodcastURL - FROM "GpodderSyncSubscriptions" s - WHERE s.UserID = $1 - AND s.DeviceID != $2 - AND s.Timestamp > $3 - AND s.Action = 'remove' - AND NOT EXISTS ( - SELECT 1 FROM "GpodderSyncSubscriptions" s2 - WHERE s2.UserID = s.UserID - AND s2.PodcastURL = s.PodcastURL - AND s2.DeviceID = $2 - AND s2.Timestamp > s.Timestamp - AND s2.Action = 'add' - ) + SELECT s.PodcastURL + FROM "GpodderSyncSubscriptions" s + WHERE s.UserID = $1 + AND s.DeviceID != $2 + AND s.Timestamp > $3 + AND s.Action = 'remove' + GROUP BY s.PodcastURL + ORDER BY MAX(s.Timestamp) DESC + LIMIT $4 ` - removeRows, err = database.Query(query, userID, deviceID, since) + removeRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES) } else { query = ` - SELECT DISTINCT s.PodcastURL - FROM GpodderSyncSubscriptions s - WHERE s.UserID = ? - AND s.DeviceID != ? - AND s.Timestamp > ? - AND s.Action = 'remove' - AND NOT EXISTS ( - SELECT 1 FROM GpodderSyncSubscriptions s2 - WHERE s2.UserID = s.UserID - AND s2.PodcastURL = s.PodcastURL - AND s2.DeviceID = ? 
- AND s2.Timestamp > s.Timestamp - AND s2.Action = 'add' - ) + SELECT s.PodcastURL + FROM GpodderSyncSubscriptions s + WHERE s.UserID = ? + AND s.DeviceID != ? + AND s.Timestamp > ? + AND s.Action = 'remove' + GROUP BY s.PodcastURL + ORDER BY MAX(s.Timestamp) DESC + LIMIT ? ` - removeRows, err = database.Query(query, userID, deviceID, since, deviceID) + removeRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES) } if err != nil { @@ -344,7 +337,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } defer removeRows.Close() - removeList := []string{} + // Ensure removeList is never nil + removeList := make([]string, 0) for removeRows.Next() { var url string if err := removeRows.Scan(&url); err != nil { @@ -378,6 +372,14 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { log.Printf("[WARNING] Error updating device last sync time: %v", err) } + // CRITICAL FIX for issue #636: Ensure arrays are never nil - AntennaPod requires arrays, not null + if addList == nil { + addList = []string{} + } + if removeList == nil { + removeList = []string{} + } + response := gin.H{ "add": addList, "remove": removeList, @@ -482,8 +484,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc { } defer rows.Close() - // Build response - ONLY ITERATE ONCE - var urls []string + // Build response - ensure never nil + urls := make([]string, 0) for rows.Next() { var url string if err := rows.Scan(&url); err != nil { @@ -1001,7 +1003,7 @@ func uploadSubscriptionChanges(database *db.Database) gin.HandlerFunc { // Process subscriptions to add timestamp := time.Now().Unix() - updateURLs := make([][]string, 0) + updateURLs := make([][]string, 0) // Ensure never nil for _, url := range changes.Add { // Clean URL @@ -1299,8 +1301,8 @@ func getAllSubscriptions(database *db.Database) gin.HandlerFunc { } defer rows.Close() - // Build response - var urls []string + // Build response - ensure never nil + urls := make([]string, 0) for 
rows.Next() { var url string if err := rows.Scan(&url); err != nil { @@ -1425,8 +1427,8 @@ func getSubscriptionsSimple(database *db.Database) gin.HandlerFunc { } defer rows.Close() - // Build response - var urls []string + // Build response - ensure never nil + urls := make([]string, 0) for rows.Next() { var url string if err := rows.Scan(&url); err != nil { diff --git a/gpodder-api/internal/db/database.go b/gpodder-api/internal/db/database.go index 4eca2a31..4b4aa544 100644 --- a/gpodder-api/internal/db/database.go +++ b/gpodder-api/internal/db/database.go @@ -61,44 +61,19 @@ func NewDatabase(cfg config.DatabaseConfig) (*Database, error) { fmt.Println("Successfully connected to the database") - // Run migrations with retry logic for table dependencies - if err := runMigrationsWithRetry(db, cfg.Type); err != nil { - db.Close() - return nil, fmt.Errorf("failed to run migrations: %w", err) - } + // Migrations are now handled by the Python migration system + // Skip Go migrations to avoid conflicts + log.Println("Skipping Go migrations - now handled by Python migration system") return &Database{DB: db, Type: cfg.Type}, nil } -// runMigrationsWithRetry runs migrations with retry logic for dependency issues -func runMigrationsWithRetry(db *sql.DB, dbType string) error { - maxRetries := 10 - retryDelay := 3 * time.Second - - for attempt := 1; attempt <= maxRetries; attempt++ { - err := RunMigrations(db, dbType) - if err == nil { - log.Println("Migrations completed successfully") - return nil - } - - // Check if the error is due to missing prerequisite tables - if strings.Contains(err.Error(), "required table") && strings.Contains(err.Error(), "does not exist") { - log.Printf("Attempt %d/%d: Required PinePods tables not ready yet, retrying in %v... 
Error: %v", - attempt, maxRetries, retryDelay, err) - - if attempt < maxRetries { - time.Sleep(retryDelay) - continue - } - } - - // For other errors, fail immediately - return err - } - - return fmt.Errorf("failed to run migrations after %d attempts", maxRetries) -} +// runMigrationsWithRetry - DISABLED: migrations now handled by Python system +// func runMigrationsWithRetry(db *sql.DB, dbType string) error { +// All migration logic has been moved to the Python migration system +// to ensure consistency and centralized management +// This function is kept for reference but is no longer used +// } // connectPostgreSQL connects to a PostgreSQL database func connectPostgreSQL(cfg config.DatabaseConfig) (*sql.DB, error) { diff --git a/gpodder-api/internal/db/postgres.go b/gpodder-api/internal/db/postgres.go index e7561664..08b5bcb9 100644 --- a/gpodder-api/internal/db/postgres.go +++ b/gpodder-api/internal/db/postgres.go @@ -78,11 +78,9 @@ func NewPostgresDB(cfg config.DatabaseConfig) (*PostgresDB, error) { fmt.Println("Successfully connected to the database") - // Run migrations to ensure schema is up to date - if err := RunMigrations(db, "postgresql"); err != nil { - db.Close() - return nil, fmt.Errorf("failed to run migrations: %w", err) - } + // Migrations are now handled by the Python migration system + // Skip Go migrations to avoid conflicts + fmt.Println("Skipping Go migrations - now handled by Python migration system") return &PostgresDB{DB: db}, nil } diff --git a/gpodder-api/internal/models/models.go b/gpodder-api/internal/models/models.go index a0c5402d..14500a60 100644 --- a/gpodder-api/internal/models/models.go +++ b/gpodder-api/internal/models/models.go @@ -1,6 +1,7 @@ package models import ( + "encoding/json" "time" ) @@ -51,7 +52,7 @@ type SubscriptionResponse struct { Add []string `json:"add"` Remove []string `json:"remove"` Timestamp int64 `json:"timestamp"` - UpdateURLs [][]string `json:"update_urls,omitempty"` + UpdateURLs [][]string 
`json:"update_urls"` // Removed omitempty to ensure field is always present } // EpisodeAction represents an action performed on an episode @@ -69,12 +70,55 @@ type EpisodeAction struct { Episode string `json:"episode"` Device string `json:"device,omitempty"` Action string `json:"action"` - Timestamp interface{} `json:"timestamp"` // Accept any type + Timestamp interface{} `json:"-"` // Accept any type internally, but customize JSON output Started *int `json:"started,omitempty"` Position *int `json:"position,omitempty"` Total *int `json:"total,omitempty"` } +// MarshalJSON customizes JSON serialization to format timestamp as ISO 8601 string +// AntennaPod expects format: "yyyy-MM-dd'T'HH:mm:ss" (without Z timezone indicator) +func (e EpisodeAction) MarshalJSON() ([]byte, error) { + type Alias EpisodeAction + + // Convert timestamp to Unix seconds + var unixTimestamp int64 + switch t := e.Timestamp.(type) { + case int64: + unixTimestamp = t + case int: + unixTimestamp = int64(t) + case float64: + unixTimestamp = int64(t) + default: + // Default to current time if timestamp is invalid + unixTimestamp = time.Now().Unix() + } + + // Format as ISO 8601 without timezone (AntennaPod requirement) + timestampStr := time.Unix(unixTimestamp, 0).UTC().Format("2006-01-02T15:04:05") + + return json.Marshal(&struct { + Podcast string `json:"podcast"` + Episode string `json:"episode"` + Device string `json:"device,omitempty"` + Action string `json:"action"` + Timestamp string `json:"timestamp"` + Started *int `json:"started,omitempty"` + Position *int `json:"position,omitempty"` + Total *int `json:"total,omitempty"` + }{ + Podcast: e.Podcast, + Episode: e.Episode, + Device: e.Device, + Action: e.Action, + Timestamp: timestampStr, + Started: e.Started, + Position: e.Position, + Total: e.Total, + }) +} + // EpisodeActionResponse represents a response to episode action upload type EpisodeActionResponse struct { Timestamp int64 `json:"timestamp"` diff --git 
a/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg b/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg new file mode 100755 index 00000000..072b425a --- /dev/null +++ b/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg @@ -0,0 +1,46 @@ + + Download_on_the_App_Store_Badge_US-UK_RGB_blk_4SVG_092917 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/images/badge_obtainium.png b/images/badge_obtainium.png new file mode 100644 index 00000000..a4cf4f9c Binary files /dev/null and b/images/badge_obtainium.png differ diff --git a/mobile/android/app/build.gradle b/mobile/android/app/build.gradle index ff6b413d..25daa62e 100644 --- a/mobile/android/app/build.gradle +++ b/mobile/android/app/build.gradle @@ -29,7 +29,7 @@ if (keystorePropertiesFile.exists()) { } android { - compileSdk 35 + compileSdk 36 ndkVersion flutter.ndkVersion compileOptions { @@ -43,13 +43,20 @@ android { defaultConfig { applicationId "com.gooseberrydevelopment.pinepods" - minSdkVersion 22 - targetSdkVersion 34 + minSdkVersion flutter.minSdkVersion + targetSdkVersion 36 versionCode flutterVersionCode.toInteger() versionName flutterVersionName testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" } + dependenciesInfo { + // Disables dependency metadata when building APKs (for IzzyOnDroid/F-Droid) + includeInApk = false + // Disables dependency metadata when building Android App Bundles (for Google Play) + includeInBundle = false + } + signingConfigs { release { keyAlias keystoreProperties['keyAlias'] @@ -78,6 +85,11 @@ android { abortOnError false disable 'InvalidPackage' } + + // Disable PNG crunching for reproducible builds + aaptOptions { + cruncherEnabled = false + } } flutter { diff --git a/mobile/android/app/src/main/AndroidManifest.xml b/mobile/android/app/src/main/AndroidManifest.xml index 1b1b6f74..526a7034 100644 --- a/mobile/android/app/src/main/AndroidManifest.xml +++ 
b/mobile/android/app/src/main/AndroidManifest.xml @@ -9,6 +9,20 @@ + + + + + + + + + + + + + + - + @@ -45,11 +59,23 @@ - + + + + + + + + + + + + + CFBundleVersion 1.0 MinimumOSVersion - 12.0 + 13.0 diff --git a/mobile/ios/Flutter/Generated.xcconfig b/mobile/ios/Flutter/Generated.xcconfig index e5a4dc4e..b4e66a46 100644 --- a/mobile/ios/Flutter/Generated.xcconfig +++ b/mobile/ios/Flutter/Generated.xcconfig @@ -2,13 +2,15 @@ FLUTTER_ROOT=/Users/collin_pendleton/development/flutter FLUTTER_APPLICATION_PATH=/Users/collin_pendleton/Documents/github/PinePods/mobile COCOAPODS_PARALLEL_CODE_SIGN=true -FLUTTER_TARGET=lib/main.dart +FLUTTER_TARGET=/Users/collin_pendleton/Documents/github/PinePods/mobile/lib/main.dart FLUTTER_BUILD_DIR=build -FLUTTER_BUILD_NAME=0.7.9001 -FLUTTER_BUILD_NUMBER=0.7.9001 +FLUTTER_BUILD_NAME=0.8.0 +FLUTTER_BUILD_NUMBER=20252161 +FLUTTER_CLI_BUILD_MODE=debug EXCLUDED_ARCHS[sdk=iphonesimulator*]=i386 EXCLUDED_ARCHS[sdk=iphoneos*]=armv7 +DART_DEFINES=RkxVVFRFUl9WRVJTSU9OPTMuMzUuMg==,RkxVVFRFUl9DSEFOTkVMPXN0YWJsZQ==,RkxVVFRFUl9HSVRfVVJMPWh0dHBzOi8vZ2l0aHViLmNvbS9mbHV0dGVyL2ZsdXR0ZXIuZ2l0,RkxVVFRFUl9GUkFNRVdPUktfUkVWSVNJT049MDVkYjk2ODkwOA==,RkxVVFRFUl9FTkdJTkVfUkVWSVNJT049YThiZmRmYzM5NA==,RkxVVFRFUl9EQVJUX1ZFUlNJT049My45LjA= DART_OBFUSCATION=false TRACK_WIDGET_CREATION=true TREE_SHAKE_ICONS=false -PACKAGE_CONFIG=.dart_tool/package_config.json +PACKAGE_CONFIG=/Users/collin_pendleton/Documents/github/PinePods/mobile/.dart_tool/package_config.json diff --git a/mobile/ios/Flutter/flutter_export_environment.sh b/mobile/ios/Flutter/flutter_export_environment.sh index 9ee246b3..9e4d5f5f 100755 --- a/mobile/ios/Flutter/flutter_export_environment.sh +++ b/mobile/ios/Flutter/flutter_export_environment.sh @@ -3,11 +3,13 @@ export "FLUTTER_ROOT=/Users/collin_pendleton/development/flutter" export "FLUTTER_APPLICATION_PATH=/Users/collin_pendleton/Documents/github/PinePods/mobile" export "COCOAPODS_PARALLEL_CODE_SIGN=true" -export "FLUTTER_TARGET=lib/main.dart" 
+export "FLUTTER_TARGET=/Users/collin_pendleton/Documents/github/PinePods/mobile/lib/main.dart" export "FLUTTER_BUILD_DIR=build" -export "FLUTTER_BUILD_NAME=0.7.9" -export "FLUTTER_BUILD_NUMBER=0.7.9" +export "FLUTTER_BUILD_NAME=0.8.0" +export "FLUTTER_BUILD_NUMBER=20252161" +export "FLUTTER_CLI_BUILD_MODE=debug" +export "DART_DEFINES=RkxVVFRFUl9WRVJTSU9OPTMuMzUuMg==,RkxVVFRFUl9DSEFOTkVMPXN0YWJsZQ==,RkxVVFRFUl9HSVRfVVJMPWh0dHBzOi8vZ2l0aHViLmNvbS9mbHV0dGVyL2ZsdXR0ZXIuZ2l0,RkxVVFRFUl9GUkFNRVdPUktfUkVWSVNJT049MDVkYjk2ODkwOA==,RkxVVFRFUl9FTkdJTkVfUkVWSVNJT049YThiZmRmYzM5NA==,RkxVVFRFUl9EQVJUX1ZFUlNJT049My45LjA=" export "DART_OBFUSCATION=false" export "TRACK_WIDGET_CREATION=true" export "TREE_SHAKE_ICONS=false" -export "PACKAGE_CONFIG=.dart_tool/package_config.json" +export "PACKAGE_CONFIG=/Users/collin_pendleton/Documents/github/PinePods/mobile/.dart_tool/package_config.json" diff --git a/mobile/ios/Podfile b/mobile/ios/Podfile index cc146a7b..37078cf6 100644 --- a/mobile/ios/Podfile +++ b/mobile/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -platform :ios, '12.0' +platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. 
ENV['COCOAPODS_DISABLE_STATS'] = 'true' @@ -42,6 +42,8 @@ post_install do |installer| flutter_additional_ios_build_settings(target) target.build_configurations.each do |config| + # Ensure minimum deployment target is 13.0 + config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '13.0' config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [ '$(inherited)', diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock new file mode 100644 index 00000000..65b45558 --- /dev/null +++ b/mobile/ios/Podfile.lock @@ -0,0 +1,165 @@ +PODS: + - app_links (0.0.2): + - Flutter + - audio_service (0.0.1): + - Flutter + - FlutterMacOS + - audio_session (0.0.1): + - Flutter + - connectivity_plus (0.0.1): + - Flutter + - device_info_plus (0.0.1): + - Flutter + - DKImagePickerController/Core (4.3.9): + - DKImagePickerController/ImageDataManager + - DKImagePickerController/Resource + - DKImagePickerController/ImageDataManager (4.3.9) + - DKImagePickerController/PhotoGallery (4.3.9): + - DKImagePickerController/Core + - DKPhotoGallery + - DKImagePickerController/Resource (4.3.9) + - DKPhotoGallery (0.0.19): + - DKPhotoGallery/Core (= 0.0.19) + - DKPhotoGallery/Model (= 0.0.19) + - DKPhotoGallery/Preview (= 0.0.19) + - DKPhotoGallery/Resource (= 0.0.19) + - SDWebImage + - SwiftyGif + - DKPhotoGallery/Core (0.0.19): + - DKPhotoGallery/Model + - DKPhotoGallery/Preview + - SDWebImage + - SwiftyGif + - DKPhotoGallery/Model (0.0.19): + - SDWebImage + - SwiftyGif + - DKPhotoGallery/Preview (0.0.19): + - DKPhotoGallery/Model + - DKPhotoGallery/Resource + - SDWebImage + - SwiftyGif + - DKPhotoGallery/Resource (0.0.19): + - SDWebImage + - SwiftyGif + - file_picker (0.0.1): + - DKImagePickerController/PhotoGallery + - Flutter + - Flutter (1.0.0) + - flutter_downloader (0.0.1): + - Flutter + - just_audio (0.0.1): + - Flutter + - FlutterMacOS + - package_info_plus (0.4.5): + - Flutter + - path_provider_foundation (0.0.1): + - Flutter + - FlutterMacOS + - permission_handler_apple (9.3.0): + - 
Flutter + - SDWebImage (5.21.1): + - SDWebImage/Core (= 5.21.1) + - SDWebImage/Core (5.21.1) + - share_plus (0.0.1): + - Flutter + - shared_preferences_foundation (0.0.1): + - Flutter + - FlutterMacOS + - sqflite_darwin (0.0.4): + - Flutter + - FlutterMacOS + - SwiftyGif (5.4.5) + - url_launcher_ios (0.0.1): + - Flutter + - webview_flutter_wkwebview (0.0.1): + - Flutter + - FlutterMacOS + +DEPENDENCIES: + - app_links (from `.symlinks/plugins/app_links/ios`) + - audio_service (from `.symlinks/plugins/audio_service/darwin`) + - audio_session (from `.symlinks/plugins/audio_session/ios`) + - connectivity_plus (from `.symlinks/plugins/connectivity_plus/ios`) + - device_info_plus (from `.symlinks/plugins/device_info_plus/ios`) + - file_picker (from `.symlinks/plugins/file_picker/ios`) + - Flutter (from `Flutter`) + - flutter_downloader (from `.symlinks/plugins/flutter_downloader/ios`) + - just_audio (from `.symlinks/plugins/just_audio/darwin`) + - package_info_plus (from `.symlinks/plugins/package_info_plus/ios`) + - path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/darwin`) + - permission_handler_apple (from `.symlinks/plugins/permission_handler_apple/ios`) + - share_plus (from `.symlinks/plugins/share_plus/ios`) + - shared_preferences_foundation (from `.symlinks/plugins/shared_preferences_foundation/darwin`) + - sqflite_darwin (from `.symlinks/plugins/sqflite_darwin/darwin`) + - url_launcher_ios (from `.symlinks/plugins/url_launcher_ios/ios`) + - webview_flutter_wkwebview (from `.symlinks/plugins/webview_flutter_wkwebview/darwin`) + +SPEC REPOS: + trunk: + - DKImagePickerController + - DKPhotoGallery + - SDWebImage + - SwiftyGif + +EXTERNAL SOURCES: + app_links: + :path: ".symlinks/plugins/app_links/ios" + audio_service: + :path: ".symlinks/plugins/audio_service/darwin" + audio_session: + :path: ".symlinks/plugins/audio_session/ios" + connectivity_plus: + :path: ".symlinks/plugins/connectivity_plus/ios" + device_info_plus: + :path: 
".symlinks/plugins/device_info_plus/ios" + file_picker: + :path: ".symlinks/plugins/file_picker/ios" + Flutter: + :path: Flutter + flutter_downloader: + :path: ".symlinks/plugins/flutter_downloader/ios" + just_audio: + :path: ".symlinks/plugins/just_audio/darwin" + package_info_plus: + :path: ".symlinks/plugins/package_info_plus/ios" + path_provider_foundation: + :path: ".symlinks/plugins/path_provider_foundation/darwin" + permission_handler_apple: + :path: ".symlinks/plugins/permission_handler_apple/ios" + share_plus: + :path: ".symlinks/plugins/share_plus/ios" + shared_preferences_foundation: + :path: ".symlinks/plugins/shared_preferences_foundation/darwin" + sqflite_darwin: + :path: ".symlinks/plugins/sqflite_darwin/darwin" + url_launcher_ios: + :path: ".symlinks/plugins/url_launcher_ios/ios" + webview_flutter_wkwebview: + :path: ".symlinks/plugins/webview_flutter_wkwebview/darwin" + +SPEC CHECKSUMS: + app_links: 76b66b60cc809390ca1ad69bfd66b998d2387ac7 + audio_service: aa99a6ba2ae7565996015322b0bb024e1d25c6fd + audio_session: 9bb7f6c970f21241b19f5a3658097ae459681ba0 + connectivity_plus: cb623214f4e1f6ef8fe7403d580fdad517d2f7dd + device_info_plus: 21fcca2080fbcd348be798aa36c3e5ed849eefbe + DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c + DKPhotoGallery: b3834fecb755ee09a593d7c9e389d8b5d6deed60 + file_picker: a0560bc09d61de87f12d246fc47d2119e6ef37be + Flutter: cabc95a1d2626b1b06e7179b784ebcf0c0cde467 + flutter_downloader: 78da0da1084e709cbfd3b723c7ea349c71681f09 + just_audio: 4e391f57b79cad2b0674030a00453ca5ce817eed + package_info_plus: af8e2ca6888548050f16fa2f1938db7b5a5df499 + path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564 + permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d + SDWebImage: f29024626962457f3470184232766516dee8dfea + share_plus: 50da8cb520a8f0f65671c6c6a99b3617ed10a58a + shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7 + sqflite_darwin: 
20b2a3a3b70e43edae938624ce550a3cbf66a3d0 + SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4 + url_launcher_ios: 694010445543906933d732453a59da0a173ae33d + webview_flutter_wkwebview: 1821ceac936eba6f7984d89a9f3bcb4dea99ebb2 + +PODFILE CHECKSUM: 0ab06865a10aced8dcbecd5fae08a60eea944bfe + +COCOAPODS: 1.16.2 diff --git a/mobile/ios/Runner.xcodeproj/project.pbxproj b/mobile/ios/Runner.xcodeproj/project.pbxproj index c85d8d42..a3c5b47a 100644 --- a/mobile/ios/Runner.xcodeproj/project.pbxproj +++ b/mobile/ios/Runner.xcodeproj/project.pbxproj @@ -238,12 +238,12 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin"; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin\n"; }; 9740EEB61CF901F6004384FC /* Run Script */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; - buildActionMask = 2147483647; + buildActionMask = 12; files = ( ); inputPaths = ( @@ -253,7 +253,7 @@ ); runOnlyForDeploymentPostprocessing = 0; shellPath = /bin/sh; - shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build"; + shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build\n"; }; B2A05C7B67BBE001F257F90D /* [CP] Embed Pods Frameworks */ = { isa = PBXShellScriptBuildPhase; @@ -365,7 +365,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -382,7 +382,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = 879LYRSYW9; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -397,8 +397,8 @@ "$(inherited)", 
"$(PROJECT_DIR)/Flutter", ); - MARKETING_VERSION = 1.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.placeholder; + MARKETING_VERSION = 1.1.2; + PRODUCT_BUNDLE_IDENTIFIER = com.gooseberrydevelopment.pinepods; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; @@ -454,7 +454,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -504,7 +504,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; ONLY_ACTIVE_ARCH = YES; SDKROOT = iphoneos; @@ -523,7 +523,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = 879LYRSYW9; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -538,8 +538,8 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - MARKETING_VERSION = 1.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.placeholder; + MARKETING_VERSION = 1.1.2; + PRODUCT_BUNDLE_IDENTIFIER = com.gooseberrydevelopment.pinepods; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; @@ -555,7 +555,7 @@ ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_MODULES = YES; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; - DEVELOPMENT_TEAM = ""; + DEVELOPMENT_TEAM = 879LYRSYW9; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -570,8 +570,8 @@ "$(inherited)", "$(PROJECT_DIR)/Flutter", ); - MARKETING_VERSION = 1.1.0; - PRODUCT_BUNDLE_IDENTIFIER = com.placeholder; + MARKETING_VERSION = 1.1.2; + PRODUCT_BUNDLE_IDENTIFIER = 
com.gooseberrydevelopment.pinepods; PRODUCT_NAME = "$(TARGET_NAME)"; SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h"; SWIFT_VERSION = 5.0; diff --git a/mobile/ios/Runner/GeneratedPluginRegistrant.m b/mobile/ios/Runner/GeneratedPluginRegistrant.m index 0ef72e8c..b5204fdb 100644 --- a/mobile/ios/Runner/GeneratedPluginRegistrant.m +++ b/mobile/ios/Runner/GeneratedPluginRegistrant.m @@ -54,6 +54,12 @@ @import just_audio; #endif +#if __has_include() +#import +#else +@import package_info_plus; +#endif + #if __has_include() #import #else @@ -90,6 +96,12 @@ @import url_launcher_ios; #endif +#if __has_include() +#import +#else +@import webview_flutter_wkwebview; +#endif + @implementation GeneratedPluginRegistrant + (void)registerWithRegistry:(NSObject*)registry { @@ -101,12 +113,14 @@ + (void)registerWithRegistry:(NSObject*)registry { [FilePickerPlugin registerWithRegistrar:[registry registrarForPlugin:@"FilePickerPlugin"]]; [FlutterDownloaderPlugin registerWithRegistrar:[registry registrarForPlugin:@"FlutterDownloaderPlugin"]]; [JustAudioPlugin registerWithRegistrar:[registry registrarForPlugin:@"JustAudioPlugin"]]; + [FPPPackageInfoPlusPlugin registerWithRegistrar:[registry registrarForPlugin:@"FPPPackageInfoPlusPlugin"]]; [PathProviderPlugin registerWithRegistrar:[registry registrarForPlugin:@"PathProviderPlugin"]]; [PermissionHandlerPlugin registerWithRegistrar:[registry registrarForPlugin:@"PermissionHandlerPlugin"]]; [FPPSharePlusPlugin registerWithRegistrar:[registry registrarForPlugin:@"FPPSharePlusPlugin"]]; [SharedPreferencesPlugin registerWithRegistrar:[registry registrarForPlugin:@"SharedPreferencesPlugin"]]; [SqflitePlugin registerWithRegistrar:[registry registrarForPlugin:@"SqflitePlugin"]]; [URLLauncherPlugin registerWithRegistrar:[registry registrarForPlugin:@"URLLauncherPlugin"]]; + [WebViewFlutterPlugin registerWithRegistrar:[registry registrarForPlugin:@"WebViewFlutterPlugin"]]; } @end diff --git a/mobile/ios/Runner/Info.plist 
b/mobile/ios/Runner/Info.plist index 1004ac91..aa88be53 100644 --- a/mobile/ios/Runner/Info.plist +++ b/mobile/ios/Runner/Info.plist @@ -34,13 +34,30 @@ pinepods-subscribe + + CFBundleTypeRole + Viewer + CFBundleURLName + com.gooseberrydevelopment.pinepods.auth + CFBundleURLSchemes + + pinepods + + CFBundleVersion $(FLUTTER_BUILD_NUMBER) FDMaximumConcurrentTasks 1 + FlutterDeepLinkingEnabled + YES ITSAppUsesNonExemptEncryption + LSApplicationQueriesSchemes + + https + http + LSRequiresIPhoneOS NSAppTransportSecurity @@ -77,13 +94,6 @@ UIViewControllerBasedStatusBarAppearance - FlutterDeepLinkingEnabled - NO - LSApplicationQueriesSchemes - - https - http - UTExportedTypeDeclarations diff --git a/mobile/lib/api/podcast/mobile_podcast_api.dart b/mobile/lib/api/podcast/mobile_podcast_api.dart index 9e3ce50b..aa67a3ce 100644 --- a/mobile/lib/api/podcast/mobile_podcast_api.dart +++ b/mobile/lib/api/podcast/mobile_podcast_api.dart @@ -9,6 +9,8 @@ import 'package:pinepods_mobile/core/environment.dart'; import 'package:pinepods_mobile/entities/transcript.dart'; import 'package:flutter/foundation.dart'; import 'package:podcast_search/podcast_search.dart' as podcast_search; +import 'package:http/http.dart' as http; +import 'package:html/parser.dart' as html; /// An implementation of the [PodcastApi]. 
/// @@ -92,6 +94,11 @@ class MobilePodcastApi extends PodcastApi { @override Future loadTranscript(TranscriptUrl transcriptUrl) async { + // Handle HTML transcripts with custom parser + if (transcriptUrl.type == TranscriptFormat.html) { + return await _loadHtmlTranscript(transcriptUrl); + } + late podcast_search.TranscriptFormat format; switch (transcriptUrl.type) { @@ -101,6 +108,10 @@ class MobilePodcastApi extends PodcastApi { case TranscriptFormat.json: format = podcast_search.TranscriptFormat.json; break; + case TranscriptFormat.html: + // This case is now handled above + format = podcast_search.TranscriptFormat.unsupported; + break; case TranscriptFormat.unsupported: format = podcast_search.TranscriptFormat.unsupported; break; @@ -125,6 +136,61 @@ class MobilePodcastApi extends PodcastApi { } } + /// Parse HTML transcript content into a transcript object + Future _loadHtmlTranscript(TranscriptUrl transcriptUrl) async { + try { + final response = await http.get(Uri.parse(transcriptUrl.url)); + + if (response.statusCode != 200) { + return podcast_search.Transcript(); + } + + final document = html.parse(response.body); + final subtitles = []; + + // For HTML transcripts, find the main content area and render as a single block + String transcriptContent = ''; + + // Try to find the main transcript content area + final transcriptContainer = document.querySelector('.transcript, .content, main, article') ?? + document.querySelector('body'); + + if (transcriptContainer != null) { + transcriptContent = transcriptContainer.innerHtml; + + // Clean up common unwanted elements + final cleanDoc = html.parse(transcriptContent); + + // Remove navigation, headers, footers, ads, etc. + for (final selector in ['nav', 'header', 'footer', '.nav', '.navigation', '.ads', '.advertisement', '.sidebar']) { + cleanDoc.querySelectorAll(selector).forEach((el) => el.remove()); + } + + transcriptContent = cleanDoc.body?.innerHtml ?? 
transcriptContent; + + // Process markdown-style links [text](url) -> text + transcriptContent = transcriptContent.replaceAllMapped( + RegExp(r'\[([^\]]+)\]\(([^)]+)\)'), + (match) => '${match.group(1)}', + ); + + // Create a single subtitle entry for the entire HTML transcript + subtitles.add(podcast_search.Subtitle( + index: 0, + start: const Duration(seconds: 0), + end: const Duration(seconds: 1), // Minimal duration since timing doesn't matter + data: '{{HTMLFULL}}$transcriptContent', + speaker: '', + )); + } + + return podcast_search.Transcript(subtitles: subtitles); + } catch (e) { + debugPrint('Error parsing HTML transcript: $e'); + return podcast_search.Transcript(); + } + } + static Future _search(Map searchParams) { var term = searchParams['term']!; var provider = searchParams['searchProvider'] == 'itunes' diff --git a/mobile/lib/bloc/podcast/podcast_bloc.dart b/mobile/lib/bloc/podcast/podcast_bloc.dart index 784735a7..35067b98 100644 --- a/mobile/lib/bloc/podcast/podcast_bloc.dart +++ b/mobile/lib/bloc/podcast/podcast_bloc.dart @@ -12,6 +12,8 @@ import 'package:pinepods_mobile/services/download/download_service.dart'; import 'package:pinepods_mobile/services/download/mobile_download_service.dart'; import 'package:pinepods_mobile/services/podcast/podcast_service.dart'; import 'package:pinepods_mobile/services/settings/settings_service.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; +import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/state/bloc_state.dart'; import 'package:collection/collection.dart' show IterableExtension; import 'package:logging/logging.dart'; @@ -305,19 +307,65 @@ class PodcastBloc extends Bloc { switch (event) { case PodcastEvent.subscribe: if (_podcast != null) { + // Emit loading state for subscription + _podcastStream.add(BlocLoadingState(_podcast)); + + // First, subscribe locally _podcast = await podcastService.subscribe(_podcast!); - 
_podcastStream.add(BlocPopulatedState(results: _podcast)); - _loadSubscriptions(); - _episodesStream.add(_podcast?.episodes); + + // Check if we're in a PinePods environment and also add to server + if (_podcast != null) { + try { + final settings = settingsService.settings; + if (settings != null && + settings.pinepodsServer != null && + settings.pinepodsApiKey != null && + settings.pinepodsUserId != null) { + + // Also add to PinePods server + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + final unifiedPodcast = UnifiedPinepodsPodcast( + id: 0, + indexId: 0, + title: _podcast!.title, + url: _podcast!.url ?? '', + originalUrl: _podcast!.url ?? '', + link: _podcast!.link ?? '', + description: _podcast!.description ?? '', + author: _podcast!.copyright ?? '', + ownerName: _podcast!.copyright ?? '', + image: _podcast!.imageUrl ?? '', + artwork: _podcast!.imageUrl ?? '', + lastUpdateTime: 0, + explicit: false, + episodeCount: 0, + ); + + await pinepodsService.addPodcast(unifiedPodcast, settings.pinepodsUserId!); + log.fine('Added podcast to PinePods server'); + } + } catch (e) { + log.warning('Failed to add podcast to PinePods server: $e'); + // Continue with local subscription even if server add fails + } + + _episodes = _podcast!.episodes; + _podcastStream.add(BlocPopulatedState(results: _podcast)); + _loadSubscriptions(); + _refresh(); // Use _refresh to apply filters and update episode stream properly + } } break; case PodcastEvent.unsubscribe: if (_podcast != null) { await podcastService.unsubscribe(_podcast!); _podcast!.id = null; + _episodes = _podcast!.episodes; _podcastStream.add(BlocPopulatedState(results: _podcast)); _loadSubscriptions(); - _episodesStream.add(_podcast!.episodes); + _refresh(); // Use _refresh to apply filters and update episode stream properly } break; case PodcastEvent.markAllPlayed: diff --git a/mobile/lib/core/environment.dart 
b/mobile/lib/core/environment.dart index 862a04b4..121ec0d6 100644 --- a/mobile/lib/core/environment.dart +++ b/mobile/lib/core/environment.dart @@ -22,7 +22,7 @@ const userAgentAppString = String.fromEnvironment( /// Link to a feedback form. This will be shown in the main overflow menu if set const feedbackUrl = String.fromEnvironment('FEEDBACK_URL', defaultValue: ''); -/// This class stores version information for Anytime, including project version and +/// This class stores version information for PinePods, including project version and /// build number. This is then used for user agent strings when interacting with /// APIs and RSS feeds. /// @@ -32,8 +32,8 @@ class Environment { static const _applicationName = 'Pinepods'; static const _applicationUrl = 'https://github.com/madeofpendletonwool/pinepods'; - static const _projectVersion = '0.7.9'; - static const _build = '166'; + static const _projectVersion = '0.8.1'; + static const _build = '20252203'; static var _agentString = userAgentAppString; diff --git a/mobile/lib/core/extensions.dart b/mobile/lib/core/extensions.dart index a3b351ed..859621cf 100644 --- a/mobile/lib/core/extensions.dart +++ b/mobile/lib/core/extensions.dart @@ -33,6 +33,17 @@ extension ExtString on String? 
{ final url = Uri.tryParse(this!); if (url == null || !url.isScheme('http')) return this!; + + // Don't force HTTPS for localhost or local IP addresses to support self-hosted development + final host = url.host.toLowerCase(); + if (host == 'localhost' || + host == '127.0.0.1' || + host.startsWith('10.') || + host.startsWith('192.168.') || + host.startsWith('172.') || + host.endsWith('.local')) { + return this!; + } return url.replace(scheme: 'https').toString(); } diff --git a/mobile/lib/core/utils.dart b/mobile/lib/core/utils.dart index b4e28295..e013f1cb 100644 --- a/mobile/lib/core/utils.dart +++ b/mobile/lib/core/utils.dart @@ -64,7 +64,7 @@ Future getStorageDirectory() async { directory = await getApplicationSupportDirectory(); } - return join(directory.path, 'AnyTime'); + return join(directory.path, 'PinePods'); } Future hasExternalStorage() async { diff --git a/mobile/lib/entities/chapter.dart b/mobile/lib/entities/chapter.dart index 48b7a842..0b093b9e 100644 --- a/mobile/lib/entities/chapter.dart +++ b/mobile/lib/entities/chapter.dart @@ -56,8 +56,8 @@ class Chapter { imageUrl: chapter['imageUrl'] as String?, url: chapter['url'] as String?, toc: chapter['toc'] == 'false' ? false : true, - startTime: double.parse(chapter['startTime'] as String), - endTime: double.parse(chapter['endTime'] as String), + startTime: double.tryParse(chapter['startTime'] as String? ?? '0') ?? 0.0, + endTime: double.tryParse(chapter['endTime'] as String? ?? '0') ?? 
0.0, ); } diff --git a/mobile/lib/entities/home_data.dart b/mobile/lib/entities/home_data.dart index b7c5fc2e..452047f7 100644 --- a/mobile/lib/entities/home_data.dart +++ b/mobile/lib/entities/home_data.dart @@ -40,7 +40,7 @@ class HomePodcast { podcastIndexId: json['podcastindexid'], artworkUrl: json['artworkurl'], author: json['author'], - categories: json['categories'], + categories: _parseCategories(json['categories']), description: json['description'], episodeCount: json['episodecount'], feedUrl: json['feedurl'], @@ -51,6 +51,22 @@ class HomePodcast { totalListenTime: json['total_listen_time'], ); } + + /// Parse categories from either string or Map format + static String? _parseCategories(dynamic categories) { + if (categories == null) return null; + + if (categories is String) { + // Old format - return as is + return categories; + } else if (categories is Map) { + // New format - convert map values to comma-separated string + if (categories.isEmpty) return null; + return categories.values.join(', '); + } + + return null; + } } class HomeEpisode { diff --git a/mobile/lib/entities/pinepods_search.dart b/mobile/lib/entities/pinepods_search.dart index 7e84fbaa..9ec80525 100644 --- a/mobile/lib/entities/pinepods_search.dart +++ b/mobile/lib/entities/pinepods_search.dart @@ -279,8 +279,8 @@ class UnifiedPinepodsPodcast { factory UnifiedPinepodsPodcast.fromPodcast(PinepodsPodcast podcast) { return UnifiedPinepodsPodcast( - id: podcast.id, - indexId: podcast.id, + id: 0, // Internal database ID - will be fetched when needed + indexId: podcast.id, // Podcast index ID title: podcast.title, url: podcast.url, originalUrl: podcast.originalUrl, diff --git a/mobile/lib/entities/search_providers.dart b/mobile/lib/entities/search_providers.dart index 45725460..f34b0929 100644 --- a/mobile/lib/entities/search_providers.dart +++ b/mobile/lib/entities/search_providers.dart @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can be // found in 
the LICENSE file. -/// Anytime can support multiple search providers. +/// PinePods can support multiple search providers. /// /// This class represents a provider. class SearchProvider { diff --git a/mobile/lib/entities/transcript.dart b/mobile/lib/entities/transcript.dart index 226bde6e..5ca85ffa 100644 --- a/mobile/lib/entities/transcript.dart +++ b/mobile/lib/entities/transcript.dart @@ -8,6 +8,7 @@ import 'package:flutter/foundation.dart'; enum TranscriptFormat { json, subrip, + html, unsupported, } @@ -39,9 +40,12 @@ class TranscriptUrl { case TranscriptFormat.json: t = 1; break; - case TranscriptFormat.unsupported: + case TranscriptFormat.html: t = 2; break; + case TranscriptFormat.unsupported: + t = 3; + break; } return { @@ -65,6 +69,9 @@ class TranscriptUrl { t = TranscriptFormat.json; break; case 2: + t = TranscriptFormat.html; + break; + case 3: t = TranscriptFormat.unsupported; break; } diff --git a/mobile/lib/l10n/L.dart b/mobile/lib/l10n/L.dart index c8ec0229..2f971297 100644 --- a/mobile/lib/l10n/L.dart +++ b/mobile/lib/l10n/L.dart @@ -10,8 +10,13 @@ import 'messages_all.dart'; class L { L(this.localeName, this.overrides); - static Future load(Locale locale, Map> overrides) { - final name = locale.countryCode?.isEmpty ?? true ? locale.languageCode : locale.toString(); + static Future load( + Locale locale, + Map> overrides, + ) { + final name = locale.countryCode?.isEmpty ?? true + ? locale.languageCode + : locale.toString(); final localeName = Intl.canonicalizedLocale(name); return initializeMessages(localeName).then((_) { @@ -28,10 +33,13 @@ class L { /// Message definitions start here String? message(String name) { - if (overrides == null || overrides.isEmpty || !overrides.containsKey(name)) { + if (overrides == null || + overrides.isEmpty || + !overrides.containsKey(name)) { return null; } else { - return overrides[name]![localeName] ?? 'Missing translation for $name and locale $localeName'; + return overrides[name]![localeName] ?? 
+ 'Missing translation for $name and locale $localeName'; } } @@ -162,7 +170,8 @@ class L { Intl.message( 'Search for podcasts', name: 'search_for_podcasts_hint', - desc: 'Hint displayed on search bar when the user clicks the search icon.', + desc: + 'Hint displayed on search bar when the user clicks the search icon.', locale: localeName, ); } @@ -172,7 +181,8 @@ class L { Intl.message( 'Head to Settings to Connect a Pinepods Server if you haven\'t yet!', name: 'no_subscriptions_message', - desc: 'Displayed on the library tab when the user has no subscriptions', + desc: + 'Displayed on the library tab when the user has no subscriptions', locale: localeName, ); } @@ -222,7 +232,8 @@ class L { Intl.message( 'Are you sure you wish to delete this episode?', name: 'delete_episode_confirmation', - desc: 'User is asked to confirm when they attempt to delete an episode', + desc: + 'User is asked to confirm when they attempt to delete an episode', locale: localeName, ); } @@ -242,7 +253,8 @@ class L { Intl.message( 'You do not have any downloaded episodes', name: 'no_downloads_message', - desc: 'Displayed on the library tab when the user has no subscriptions', + desc: + 'Displayed on the library tab when the user has no subscriptions', locale: localeName, ); } @@ -252,7 +264,8 @@ class L { Intl.message( 'No podcasts found', name: 'no_search_results_message', - desc: 'Displayed on the library tab when the user has no subscriptions', + desc: + 'Displayed on the library tab when the user has no subscriptions', locale: localeName, ); } @@ -262,7 +275,8 @@ class L { Intl.message( 'Could not load podcast episodes. 
Please check your connection.', name: 'no_podcast_details_message', - desc: 'Displayed on the podcast details page when the details could not be loaded', + desc: + 'Displayed on the podcast details page when the details could not be loaded', locale: localeName, ); } @@ -412,7 +426,8 @@ class L { Intl.message( 'Are you sure you wish to stop this download and delete the episode?', name: 'stop_download_confirmation', - desc: 'User is asked to confirm when they wish to stop the active download.', + desc: + 'User is asked to confirm when they wish to stop the active download.', locale: localeName, ); } @@ -482,7 +497,8 @@ class L { Intl.message( 'New downloads will be saved to internal storage. Existing downloads will remain on the SD card.', name: 'settings_download_switch_internal', - desc: 'Displayed when user switches from internal SD card to internal storage', + desc: + 'Displayed when user switches from internal SD card to internal storage', locale: localeName, ); } @@ -580,7 +596,7 @@ class L { String get consent_message { return message('consent_message') ?? Intl.message( - 'This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.', + 'This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by PinePods.', name: 'consent_message', desc: 'Display when first accessing external funding link', locale: localeName, @@ -632,7 +648,8 @@ class L { Intl.message( 'Full screen player mode on episode start', name: 'settings_auto_open_now_playing', - desc: 'Displayed when user switches to use full screen player automatically', + desc: + 'Displayed when user switches to use full screen player automatically', locale: localeName, ); } @@ -642,7 +659,8 @@ class L { Intl.message( 'Unable to play episode. 
Please check your connection and try again.', name: 'error_no_connection', - desc: 'Displayed when attempting to start streaming an episode with no data connection', + desc: + 'Displayed when attempting to start streaming an episode with no data connection', locale: localeName, ); } @@ -652,7 +670,8 @@ class L { Intl.message( 'An unexpected error occurred during playback. Please check your connection and try again.', name: 'error_playback_fail', - desc: 'Displayed when attempting to start streaming an episode with no data connection', + desc: + 'Displayed when attempting to start streaming an episode with no data connection', locale: localeName, ); } @@ -1100,7 +1119,7 @@ class L { String get transcript_why_not_url { return message('transcript_why_not_url') ?? Intl.message( - 'https://anytimeplayer.app/docs/anytime_transcript_support_en.html', + 'https://www.pinepods.online/docs/Features/Transcript', name: 'transcript_why_not_url', desc: 'Language specific link', locale: localeName, @@ -1473,7 +1492,8 @@ class L { Intl.message( 'No Episodes Found', name: 'episode_filter_no_episodes_title_description', - desc: 'This podcast has no episodes matching your search criteria and filter', + desc: + 'This podcast has no episodes matching your search criteria and filter', locale: localeName, ); } @@ -1583,7 +1603,8 @@ class L { Intl.message( 'Dismiss layout selector', name: 'scrim_layout_selector', - desc: 'Replaces default scrim label for layout selector bottom sheet.', + desc: + 'Replaces default scrim label for layout selector bottom sheet.', locale: localeName, ); } @@ -1683,7 +1704,8 @@ class L { Intl.message( 'Dismiss episode details', name: 'scrim_episode_details_selector', - desc: 'Replaces default scrim label for episode details bottom sheet.', + desc: + 'Replaces default scrim label for episode details bottom sheet.', locale: localeName, ); } @@ -1719,17 +1741,18 @@ class L { } } -class AnytimeLocalisationsDelegate extends LocalizationsDelegate { - const 
AnytimeLocalisationsDelegate(); +class PinepodsLocalisationsDelegate extends LocalizationsDelegate { + const PinepodsLocalisationsDelegate(); @override - bool isSupported(Locale locale) => ['en', 'de', 'it'].contains(locale.languageCode); + bool isSupported(Locale locale) => + ['en', 'de', 'it'].contains(locale.languageCode); @override Future load(Locale locale) => L.load(locale, const {}); @override - bool shouldReload(AnytimeLocalisationsDelegate old) => false; + bool shouldReload(PinepodsLocalisationsDelegate old) => false; } /// This class can be used by third-parties who wish to override or replace @@ -1746,7 +1769,8 @@ class EmbeddedLocalisationsDelegate extends LocalizationsDelegate { EmbeddedLocalisationsDelegate({@required this.messages = const {}}); @override - bool isSupported(Locale locale) => ['en', 'de', 'it'].contains(locale.languageCode); + bool isSupported(Locale locale) => + ['en', 'de', 'it'].contains(locale.languageCode); @override Future load(Locale locale) => L.load(locale, messages); diff --git a/mobile/lib/l10n/intl_de.arb b/mobile/lib/l10n/intl_de.arb index 4dacba59..55192f4c 100644 --- a/mobile/lib/l10n/intl_de.arb +++ b/mobile/lib/l10n/intl_de.arb @@ -1,6 +1,6 @@ { "@@last_modified": "2020-02-20T10:40:43.008209", - "app_title": "Anytime Podcast Player", + "app_title": "PinePods Podcast Player", "@app_title": { "description": "Full title for the application", "type": "text", @@ -324,7 +324,7 @@ "type": "text", "placeholders": {} }, - "consent_message": "Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. Links werden von den Podcast-Autoren bereitgestellt und nicht von Anytime kontrolliert.", + "consent_message": "Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. 
Links werden von den Podcast-Autoren bereitgestellt und nicht von PinePods kontrolliert.", "@consent_message": { "description": "Display when first accessing external funding link", "type": "text", @@ -657,7 +657,7 @@ "placeholders_order": [], "placeholders": {} }, - "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_de.html", + "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript", "@transcript_why_not_url": { "description": "Language specific link", "type": "text", @@ -1065,4 +1065,4 @@ "type": "text", "placeholders": {} } -} \ No newline at end of file +} diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb index fbcd5919..3b2ba58e 100644 --- a/mobile/lib/l10n/intl_en.arb +++ b/mobile/lib/l10n/intl_en.arb @@ -1,7 +1,7 @@ { "@@last_modified": "2020-02-20T12:15:52.645497", "@@locale": "en", - "app_title": "Anytime Podcast Player", + "app_title": "PinePods Podcast Player", "@app_title": { "description": "Full title for the application", "type": "text", @@ -325,7 +325,7 @@ "type": "text", "placeholders": {} }, - "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.", + "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by PinePods.", "@consent_message": { "description": "Display when first accessing external funding link", "type": "text", @@ -469,7 +469,7 @@ "type": "text", "placeholders": {} }, - "settings_personalisation_divider_label": "PERSONALISATION", + "settings_personalisation_divider_label": "Personalisation", "@settings_personalisation_divider_label": { "description": "Settings divider label for personalisation", "type": "text", @@ -481,7 +481,7 @@ "type": "text", "placeholders": {} }, - "settings_playback_divider_label": "PLAYBACK", + "settings_playback_divider_label": "Playback", "@settings_playback_divider_label": { "description": "Settings divider label for playback", "type": "text", @@ -658,7 +658,7 @@ "placeholders_order": [], "placeholders": {} }, - "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html", + "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript", "@transcript_why_not_url": { "description": "Language specific link", "type": "text", @@ -1066,4 +1066,4 @@ "type": "text", "placeholders": {} } -} \ No newline at end of file +} diff --git a/mobile/lib/l10n/intl_it.arb b/mobile/lib/l10n/intl_it.arb index a50bec33..74cf7af2 100644 --- a/mobile/lib/l10n/intl_it.arb +++ b/mobile/lib/l10n/intl_it.arb @@ -1,6 +1,6 @@ { "@@last_modified": "2024-04-09T17:34:52.645497", - "app_title": "Anytime Podcast Player", + "app_title": "PinePods Podcast Player", "@app_title": { "description": "Full title for the application", "type": "text", @@ -324,7 +324,7 @@ "type": "text", "placeholders": {} }, - "consent_message": "Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. 
I link sono forniti dagli autori del podcast e non sono verificati da Anytime.", + "consent_message": "Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. I link sono forniti dagli autori del podcast e non sono verificati da PinePods.", "@consent_message": { "description": "Display when first accessing external funding link", "type": "text", @@ -657,7 +657,7 @@ "placeholders_order": [], "placeholders": {} }, - "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html", + "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript", "@transcript_why_not_url": { "description": "Language specific link", "type": "text", @@ -1065,4 +1065,4 @@ "type": "text", "placeholders": {} } -} \ No newline at end of file +} diff --git a/mobile/lib/l10n/intl_messages.arb b/mobile/lib/l10n/intl_messages.arb index e24bdc92..564f2df7 100644 --- a/mobile/lib/l10n/intl_messages.arb +++ b/mobile/lib/l10n/intl_messages.arb @@ -325,7 +325,7 @@ "type": "text", "placeholders": {} }, - "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.", + "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by Pinepods.", "@consent_message": { "description": "Display when first accessing external funding link", "type": "text", @@ -469,7 +469,7 @@ "type": "text", "placeholders": {} }, - "settings_personalisation_divider_label": "PERSONALISATION", + "settings_personalisation_divider_label": "Personalisation", "@settings_personalisation_divider_label": { "description": "Settings divider label for personalisation", "type": "text", @@ -481,7 +481,7 @@ "type": "text", "placeholders": {} }, - "settings_playback_divider_label": "PLAYBACK", + "settings_playback_divider_label": "Playback", "@settings_playback_divider_label": { "description": "Settings divider label for playback", "type": "text", @@ -637,7 +637,7 @@ "type": "text", "placeholders": {} }, - "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html", + "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript", "@transcript_why_not_url": { "description": "Language specific link", "type": "text", @@ -1011,4 +1011,4 @@ "type": "text", "placeholders": {} } -} \ No newline at end of file +} diff --git a/mobile/lib/l10n/messages_de.dart b/mobile/lib/l10n/messages_de.dart index 265c2d71..dffa80e7 100644 --- a/mobile/lib/l10n/messages_de.dart +++ b/mobile/lib/l10n/messages_de.dart @@ -14,8 +14,7 @@ import 'package:intl/message_lookup_by_library.dart'; final messages = MessageLookup(); -typedef String? MessageIfAbsent( - String? messageStr, List? args); +typedef String? MessageIfAbsent(String? messageStr, List? 
args); class MessageLookup extends MessageLookupByLibrary { @override @@ -24,176 +23,342 @@ class MessageLookup extends MessageLookupByLibrary { static m0(minutes) => "${minutes} Minuten"; @override - final Map messages = _notInlinedMessages(_notInlinedMessages); + final Map messages = + _notInlinedMessages(_notInlinedMessages); static Map _notInlinedMessages(_) => { - 'about_label': MessageLookupByLibrary.simpleMessage('Über'), - 'add_rss_feed_option': MessageLookupByLibrary.simpleMessage('RSS-Feed hinzufügen'), - 'app_title': MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), - 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), - 'audio_effect_trim_silence_label': MessageLookupByLibrary.simpleMessage('Stille Trimmen'), - 'audio_effect_volume_boost_label': MessageLookupByLibrary.simpleMessage('Lautstärke-Boost'), - 'audio_settings_playback_speed_label': MessageLookupByLibrary.simpleMessage('Wiedergabe Schnelligkeit'), - 'auto_scroll_transcript_label': MessageLookupByLibrary.simpleMessage('Follow the transcript'), - 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Stornieren'), - 'cancel_download_button_label': MessageLookupByLibrary.simpleMessage('Download abbrechen'), - 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Stirbuereb'), - 'chapters_label': MessageLookupByLibrary.simpleMessage('Kapitel'), - 'clear_queue_button_label': MessageLookupByLibrary.simpleMessage('WARTESCHLANGE LÖSCHEN'), - 'clear_search_button_label': MessageLookupByLibrary.simpleMessage('Suchtext löschen'), - 'close_button_label': MessageLookupByLibrary.simpleMessage('Schließen'), - 'consent_message': MessageLookupByLibrary.simpleMessage('Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. 
Links werden von den Podcast-Autoren bereitgestellt und nicht von Anytime kontrolliert.'), - 'continue_button_label': MessageLookupByLibrary.simpleMessage('Fortsetzen'), - 'delete_button_label': MessageLookupByLibrary.simpleMessage('Löschen'), - 'delete_episode_button_label': MessageLookupByLibrary.simpleMessage('Download -Episode löschen'), - 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage('Sind Sie sicher, dass Sie diese Episode löschen möchten?'), - 'delete_episode_title': MessageLookupByLibrary.simpleMessage('Folge löschen'), - 'delete_label': MessageLookupByLibrary.simpleMessage('Löschen'), - 'discover': MessageLookupByLibrary.simpleMessage('Entdecken'), - 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage(',Künste,Geschäft,Komödie,Ausbildung,Fiktion,Regierung,Gesundheit & Fitness,Geschichte,Kinder & Familie,Freizeit,Musik,Die Nachrichten,Religion & Spiritualität,Wissenschaft,Gesellschaft & Kultur,Sport,Fernsehen & Film,Technologie,Echte Kriminalität'), - 'discovery_categories_pindex': 
MessageLookupByLibrary.simpleMessage(',After-Shows,Alternative,Tiere,Animation,Kunst,Astronomie,Automobil,Luftfahrt,Baseball,Basketball,Schönheit,Bücher,Buddhismus,Geschäft,Karriere,Chemie,Christentum,Klima,Komödie,Kommentar,Kurse,Kunsthandwerk,Kricket,Kryptowährung,Kultur,Täglich,Design,Dokumentarfilm,Theater,Erde,Ausbildung,Unterhaltung,Unternehmerschaft,Familie,Fantasie,Mode,Fiktion,Film,Fitness,Essen,Fußball,Spiele,Garten,Golf,Regierung,Gesundheit,Hinduismus,Geschichte,Hobbys,Eishockey,Heim,Wieman,Improvisieren,Vorstellungsgespräche,Investieren,Islam,Zeitschriften,Judentum,Kinder,Sprache,Lernen,Freizeit,Leben,Management,Manga,Marketing,Mathematik,Medizin,geistig,Musik,Natürlich,Natur,Nachricht,Gemeinnützig,Ernährung,Erziehung,Aufführung,Persönlich,Haustiere,Philosophie,Physik,Setzt,Politik,Beziehungen,Religion,Bewertungen,Rollenspiel,Rugby,Betrieb,Wissenschaft,Selbstverbesserung,Sexualität,Fußball,Sozial,Gesellschaft,Spiritualität,Sport,Aufstehen,Geschichten,Baden,FERNSEHER,Tischplatte,Technologie,Tennis,Reisen,EchteKriminalität,Videospiele,Visuell,Volleyball,Wetter,Wildnis,Ringen'), - 'download_episode_button_label': MessageLookupByLibrary.simpleMessage('Folge herunterladen'), - 'downloads': MessageLookupByLibrary.simpleMessage('Herunterladen'), - 'empty_queue_message': MessageLookupByLibrary.simpleMessage('Ihre Warteschlange ist leer'), - 'episode_details_button_label': MessageLookupByLibrary.simpleMessage('Episodeninformationen anzeigen'), - 'episode_filter_clear_filters_button_label': MessageLookupByLibrary.simpleMessage('Filter zurücksetzen'), - 'episode_filter_no_episodes_title_description': MessageLookupByLibrary.simpleMessage('Dieser Podcast hat keine Episoden, die Ihren Suchkriterien und Filtern entsprechen'), - 'episode_filter_no_episodes_title_label': MessageLookupByLibrary.simpleMessage('Keine Episoden Gefunden'), - 'episode_filter_none_label': MessageLookupByLibrary.simpleMessage('Keiner'), - 'episode_filter_played_label': 
MessageLookupByLibrary.simpleMessage('Gespielt'), - 'episode_filter_semantic_label': MessageLookupByLibrary.simpleMessage('Episoden filtern'), - 'episode_filter_started_label': MessageLookupByLibrary.simpleMessage('Gestartet'), - 'episode_filter_unplayed_label': MessageLookupByLibrary.simpleMessage('Nicht gespielt'), - 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), - 'episode_sort_alphabetical_ascending_label': MessageLookupByLibrary.simpleMessage('Alphabetisch von A bis Z'), - 'episode_sort_alphabetical_descending_label': MessageLookupByLibrary.simpleMessage('Alphabetisch von Z bis A'), - 'episode_sort_earliest_first_label': MessageLookupByLibrary.simpleMessage('Das Älteste zuerst'), - 'episode_sort_latest_first_label': MessageLookupByLibrary.simpleMessage('Das Neueste zuerst'), - 'episode_sort_none_label': MessageLookupByLibrary.simpleMessage('Standard'), - 'episode_sort_semantic_label': MessageLookupByLibrary.simpleMessage('Episoden sortieren'), - 'error_no_connection': MessageLookupByLibrary.simpleMessage('Episode kann nicht abgespielt werden. Überprüfen Sie bitte Ihre Verbindung und versuchen Sie es erneut.'), - 'error_playback_fail': MessageLookupByLibrary.simpleMessage('Während der Wiedergabe ist ein unerwarteter Fehler aufgetreten. 
Überprüfen Sie bitte Ihre Verbindung und versuchen Sie es erneut.'), - 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage('30 Sekunden schneller Vorlauf'), - 'feedback_menu_item_label': MessageLookupByLibrary.simpleMessage('Rückmeldung'), - 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Geh Zurück'), - 'label_opml_importing': MessageLookupByLibrary.simpleMessage('Importieren'), - 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), - 'library': MessageLookupByLibrary.simpleMessage('Bibliothek'), - 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage('Markieren Sie alle Folgen als nicht abgespielt'), - 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage('Markieren Sie alle Episoden als abgespielt'), - 'mark_played_label': MessageLookupByLibrary.simpleMessage('Markieren gespielt'), - 'mark_unplayed_label': MessageLookupByLibrary.simpleMessage('Markieren nicht abgespielt'), - 'minimise_player_window_button_label': MessageLookupByLibrary.simpleMessage('Wiedergabebildschirm minimieren'), - 'more_label': MessageLookupByLibrary.simpleMessage('Mehr'), - 'new_episodes_label': MessageLookupByLibrary.simpleMessage('Neue Folgen sind verfügbar'), - 'new_episodes_view_now_label': MessageLookupByLibrary.simpleMessage('JETZT ANZEIGEN'), - 'no_downloads_message': MessageLookupByLibrary.simpleMessage('Sie haben keine Episoden heruntergeladen'), - 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage('Podcast-Episoden konnten nicht geladen werden. 
Bitte überprüfen Sie Ihre Verbindung.'), - 'no_search_results_message': MessageLookupByLibrary.simpleMessage('Keine Podcasts gefunden'), - 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage('Tippen Sie unten auf die Schaltfläche „Entdecken“ oder verwenden Sie die Suchleiste oben, um Ihren ersten Podcast zu finden'), - 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage('Für diesen Podcast ist kein Transkript verfügbar'), - 'notes_label': MessageLookupByLibrary.simpleMessage('Notizen'), - 'now_playing_episode_position': MessageLookupByLibrary.simpleMessage('Episodenposition'), - 'now_playing_episode_time_remaining': MessageLookupByLibrary.simpleMessage('Verbleibende Zeit'), - 'now_playing_queue_label': MessageLookupByLibrary.simpleMessage('Jetzt Spielen'), - 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), - 'open_show_website_label': MessageLookupByLibrary.simpleMessage('Show-Website öffnen'), - 'opml_export_button_label': MessageLookupByLibrary.simpleMessage('Export'), - 'opml_import_button_label': MessageLookupByLibrary.simpleMessage('Importieren'), - 'opml_import_export_label': MessageLookupByLibrary.simpleMessage('OPML Importieren/Export'), - 'pause_button_label': MessageLookupByLibrary.simpleMessage('Folge pausieren'), - 'play_button_label': MessageLookupByLibrary.simpleMessage('Folge abspielen'), - 'play_download_button_label': MessageLookupByLibrary.simpleMessage('Heruntergeladene Episode abspielen'), - 'playback_speed_label': MessageLookupByLibrary.simpleMessage('Stellen Sie die Wiedergabegeschwindigkeit ein'), - 'podcast_funding_dialog_header': MessageLookupByLibrary.simpleMessage('Podcast-Finanzierung'), - 'podcast_options_overflow_menu_semantic_label': MessageLookupByLibrary.simpleMessage('Optionsmenü'), - 'queue_add_label': MessageLookupByLibrary.simpleMessage('Addieren'), - 'queue_clear_button_label': MessageLookupByLibrary.simpleMessage('Klar'), - 'queue_clear_label': 
MessageLookupByLibrary.simpleMessage('Möchten Sie die Warteschlange wirklich löschen?'), - 'queue_clear_label_title': MessageLookupByLibrary.simpleMessage('Warteschlange löschen'), - 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Entfernen'), - 'refresh_feed_label': MessageLookupByLibrary.simpleMessage('Holen Sie sich neue Episoden'), - 'resume_button_label': MessageLookupByLibrary.simpleMessage('Folge fortsetzen'), - 'rewind_button_label': MessageLookupByLibrary.simpleMessage('10 Sekunden zurückspulen'), - 'scrim_episode_details_selector': MessageLookupByLibrary.simpleMessage('Episodendetails schließen'), - 'scrim_episode_filter_selector': MessageLookupByLibrary.simpleMessage('Episodenfilter schließen'), - 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage('Episodensortierung schließen'), - 'scrim_layout_selector': MessageLookupByLibrary.simpleMessage('Layout-Auswahl schließen'), - 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage('Auswahl des Sleep-Timers schließen'), - 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage('Auswahl der Wiedergabegeschwindigkeit schließen'), - 'search_back_button_label': MessageLookupByLibrary.simpleMessage('Zurück'), - 'search_button_label': MessageLookupByLibrary.simpleMessage('Suche'), - 'search_episodes_label': MessageLookupByLibrary.simpleMessage('Folgen suchen'), - 'search_for_podcasts_hint': MessageLookupByLibrary.simpleMessage('Suche nach Podcasts'), - 'search_provider_label': MessageLookupByLibrary.simpleMessage('Suchmaschine'), - 'search_transcript_label': MessageLookupByLibrary.simpleMessage('Transkript suchen'), - 'semantic_announce_searching': MessageLookupByLibrary.simpleMessage('Suchen, bitte warten.'), - 'semantic_chapter_link_label': MessageLookupByLibrary.simpleMessage('Weblink zum Kapitel'), - 'semantic_current_chapter_label': MessageLookupByLibrary.simpleMessage('Aktuelles Kapitel'), - 'semantic_current_value_label': 
MessageLookupByLibrary.simpleMessage('Aktueller Wert'), - 'semantic_playing_options_collapse_label': MessageLookupByLibrary.simpleMessage('Schließen Sie den Schieberegler für die Wiedergabeoptionen'), - 'semantic_playing_options_expand_label': MessageLookupByLibrary.simpleMessage('Öffnen Sie den Schieberegler für die Wiedergabeoptionen'), - 'semantic_podcast_artwork_label': MessageLookupByLibrary.simpleMessage('Podcast-Artwork'), - 'semantics_add_to_queue': MessageLookupByLibrary.simpleMessage('Fügen Sie die Episode zur Warteschlange hinzu'), - 'semantics_collapse_podcast_description': MessageLookupByLibrary.simpleMessage('Collapse Podcast Beschreibung'), - 'semantics_decrease_playback_speed': MessageLookupByLibrary.simpleMessage('Verringern Sie die Wiedergabegeschwindigkeit'), - 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage('Episodenlistenelement. Zeigt Bild, Zusammenfassung und Hauptsteuerelemente.'), - 'semantics_episode_tile_collapsed_hint': MessageLookupByLibrary.simpleMessage('erweitern und weitere Details und zusätzliche Optionen anzeigen'), - 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage('Episodenlistenelement. 
Beschreibung, Hauptsteuerelemente und zusätzliche Steuerelemente werden angezeigt.'), - 'semantics_episode_tile_expanded_hint': MessageLookupByLibrary.simpleMessage('Reduzieren und Zusammenfassung anzeigen, Download- und Wiedergabesteuerung'), - 'semantics_expand_podcast_description': MessageLookupByLibrary.simpleMessage('Erweitern Sie die Beschreibung der Podcast'), - 'semantics_increase_playback_speed': MessageLookupByLibrary.simpleMessage('Erhöhen Sie die Wiedergabegeschwindigkeit'), - 'semantics_layout_option_compact_grid': MessageLookupByLibrary.simpleMessage('Kompaktes Rasterlayout'), - 'semantics_layout_option_grid': MessageLookupByLibrary.simpleMessage('Gitterstruktur'), - 'semantics_layout_option_list': MessageLookupByLibrary.simpleMessage('Listenlayout'), - 'semantics_main_player_header': MessageLookupByLibrary.simpleMessage('Hauptfenster des Players'), - 'semantics_mark_episode_played': MessageLookupByLibrary.simpleMessage('Mark Episode as played'), - 'semantics_mark_episode_unplayed': MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), - 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage('Mini-Player. Wischen Sie nach rechts, um die Schaltfläche „Wiedergabe/Pause“ anzuzeigen. 
Aktivieren, um das Hauptfenster des Players zu öffnen'), - 'semantics_play_pause_toggle': MessageLookupByLibrary.simpleMessage('Umschalten zwischen Wiedergabe und Pause'), - 'semantics_podcast_details_header': MessageLookupByLibrary.simpleMessage('Podcast-Details und Episodenseite'), - 'semantics_remove_from_queue': MessageLookupByLibrary.simpleMessage('Entfernen Sie die Episode aus der Warteschlange'), - 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage('Vollbild-Player-Modus beim Episodenstart'), - 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage('Folgen automatisch aktualisieren'), - 'settings_auto_update_episodes_10min': MessageLookupByLibrary.simpleMessage('10 Minuten seit dem letzten Update'), - 'settings_auto_update_episodes_12hour': MessageLookupByLibrary.simpleMessage('12 Stunden seit dem letzten Update'), - 'settings_auto_update_episodes_1hour': MessageLookupByLibrary.simpleMessage('1 Stunde seit dem letzten Update'), - 'settings_auto_update_episodes_30min': MessageLookupByLibrary.simpleMessage('30 Minuten seit dem letzten Update'), - 'settings_auto_update_episodes_3hour': MessageLookupByLibrary.simpleMessage('3 Stunden seit dem letzten Update'), - 'settings_auto_update_episodes_6hour': MessageLookupByLibrary.simpleMessage('6 Stunden seit dem letzten Update'), - 'settings_auto_update_episodes_always': MessageLookupByLibrary.simpleMessage('Immer'), - 'settings_auto_update_episodes_heading': MessageLookupByLibrary.simpleMessage('Folgen in der Detailansicht aktualisieren, nachdem'), - 'settings_auto_update_episodes_never': MessageLookupByLibrary.simpleMessage('Noch nie'), - 'settings_data_divider_label': MessageLookupByLibrary.simpleMessage('DATEN'), - 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage('Heruntergeladene Episoden nach dem Abspielen löschen'), - 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage('Episoden auf SD-Karte herunterladen'), - 
'settings_download_switch_card': MessageLookupByLibrary.simpleMessage('Neue Downloads werden auf der SD-Karte gespeichert. Bestehende Downloads bleiben im internen Speicher.'), - 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage('Neue Downloads werden im internen Speicher gespeichert. Bestehende Downloads verbleiben auf der SD-Karte.'), - 'settings_download_switch_label': MessageLookupByLibrary.simpleMessage('Speicherort ändern'), - 'settings_episodes_divider_label': MessageLookupByLibrary.simpleMessage('EPISODEN'), - 'settings_export_opml': MessageLookupByLibrary.simpleMessage('OPML exportieren'), - 'settings_import_opml': MessageLookupByLibrary.simpleMessage('OPML importieren'), - 'settings_label': MessageLookupByLibrary.simpleMessage('Einstellungen'), - 'settings_mark_deleted_played_label': MessageLookupByLibrary.simpleMessage('Markieren Sie gelöschte Episoden als abgespielt'), - 'settings_personalisation_divider_label': MessageLookupByLibrary.simpleMessage('PERSONALISIERUNG'), - 'settings_playback_divider_label': MessageLookupByLibrary.simpleMessage('WIEDERGABE'), - 'settings_theme_switch_label': MessageLookupByLibrary.simpleMessage('Dark theme'), - 'show_notes_label': MessageLookupByLibrary.simpleMessage('Notizen anzeigen'), - 'sleep_episode_label': MessageLookupByLibrary.simpleMessage('Ende der Folge'), - 'sleep_minute_label': m0, - 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Aus'), - 'sleep_timer_label': MessageLookupByLibrary.simpleMessage('Sleep-Timer'), - 'stop_download_button_label': MessageLookupByLibrary.simpleMessage('Halt'), - 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage('Möchten Sie diesen Download wirklich beenden und die Episode löschen?'), - 'stop_download_title': MessageLookupByLibrary.simpleMessage('Stop Download'), - 'subscribe_button_label': MessageLookupByLibrary.simpleMessage('Folgen'), - 'subscribe_label': MessageLookupByLibrary.simpleMessage('Folgen'), - 'transcript_label': 
MessageLookupByLibrary.simpleMessage('Transkript'), - 'transcript_why_not_label': MessageLookupByLibrary.simpleMessage('Warum nicht?'), - 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage('https://anytimeplayer.app/docs/anytime_transcript_support_de.html'), - 'unsubscribe_button_label': MessageLookupByLibrary.simpleMessage('Entfolgen'), - 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Nicht mehr folgen'), - 'unsubscribe_message': MessageLookupByLibrary.simpleMessage('Wenn Sie nicht mehr folgen, werden alle heruntergeladenen Folgen dieses Podcasts gelöscht.'), - 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Als nächstes') - }; + 'about_label': MessageLookupByLibrary.simpleMessage('Über'), + 'add_rss_feed_option': + MessageLookupByLibrary.simpleMessage('RSS-Feed hinzufügen'), + 'app_title': + MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), + 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), + 'audio_effect_trim_silence_label': + MessageLookupByLibrary.simpleMessage('Stille Trimmen'), + 'audio_effect_volume_boost_label': + MessageLookupByLibrary.simpleMessage('Lautstärke-Boost'), + 'audio_settings_playback_speed_label': + MessageLookupByLibrary.simpleMessage('Wiedergabe Schnelligkeit'), + 'auto_scroll_transcript_label': + MessageLookupByLibrary.simpleMessage('Follow the transcript'), + 'cancel_button_label': + MessageLookupByLibrary.simpleMessage('Stornieren'), + 'cancel_download_button_label': + MessageLookupByLibrary.simpleMessage('Download abbrechen'), + 'cancel_option_label': + MessageLookupByLibrary.simpleMessage('Stirbuereb'), + 'chapters_label': MessageLookupByLibrary.simpleMessage('Kapitel'), + 'clear_queue_button_label': + MessageLookupByLibrary.simpleMessage('WARTESCHLANGE LÖSCHEN'), + 'clear_search_button_label': + MessageLookupByLibrary.simpleMessage('Suchtext löschen'), + 'close_button_label': MessageLookupByLibrary.simpleMessage('Schließen'), + 'consent_message': 
MessageLookupByLibrary.simpleMessage( + 'Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. Links werden von den Podcast-Autoren bereitgestellt und nicht von Pinepods kontrolliert.'), + 'continue_button_label': + MessageLookupByLibrary.simpleMessage('Fortsetzen'), + 'delete_button_label': MessageLookupByLibrary.simpleMessage('Löschen'), + 'delete_episode_button_label': + MessageLookupByLibrary.simpleMessage('Download -Episode löschen'), + 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage( + 'Sind Sie sicher, dass Sie diese Episode löschen möchten?'), + 'delete_episode_title': + MessageLookupByLibrary.simpleMessage('Folge löschen'), + 'delete_label': MessageLookupByLibrary.simpleMessage('Löschen'), + 'discover': MessageLookupByLibrary.simpleMessage('Entdecken'), + 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage( + ',Künste,Geschäft,Komödie,Ausbildung,Fiktion,Regierung,Gesundheit & Fitness,Geschichte,Kinder & Familie,Freizeit,Musik,Die Nachrichten,Religion & Spiritualität,Wissenschaft,Gesellschaft & Kultur,Sport,Fernsehen & Film,Technologie,Echte Kriminalität'), + 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage( + 
',After-Shows,Alternative,Tiere,Animation,Kunst,Astronomie,Automobil,Luftfahrt,Baseball,Basketball,Schönheit,Bücher,Buddhismus,Geschäft,Karriere,Chemie,Christentum,Klima,Komödie,Kommentar,Kurse,Kunsthandwerk,Kricket,Kryptowährung,Kultur,Täglich,Design,Dokumentarfilm,Theater,Erde,Ausbildung,Unterhaltung,Unternehmerschaft,Familie,Fantasie,Mode,Fiktion,Film,Fitness,Essen,Fußball,Spiele,Garten,Golf,Regierung,Gesundheit,Hinduismus,Geschichte,Hobbys,Eishockey,Heim,Wieman,Improvisieren,Vorstellungsgespräche,Investieren,Islam,Zeitschriften,Judentum,Kinder,Sprache,Lernen,Freizeit,Leben,Management,Manga,Marketing,Mathematik,Medizin,geistig,Musik,Natürlich,Natur,Nachricht,Gemeinnützig,Ernährung,Erziehung,Aufführung,Persönlich,Haustiere,Philosophie,Physik,Setzt,Politik,Beziehungen,Religion,Bewertungen,Rollenspiel,Rugby,Betrieb,Wissenschaft,Selbstverbesserung,Sexualität,Fußball,Sozial,Gesellschaft,Spiritualität,Sport,Aufstehen,Geschichten,Baden,FERNSEHER,Tischplatte,Technologie,Tennis,Reisen,EchteKriminalität,Videospiele,Visuell,Volleyball,Wetter,Wildnis,Ringen'), + 'download_episode_button_label': + MessageLookupByLibrary.simpleMessage('Folge herunterladen'), + 'downloads': MessageLookupByLibrary.simpleMessage('Herunterladen'), + 'empty_queue_message': + MessageLookupByLibrary.simpleMessage('Ihre Warteschlange ist leer'), + 'episode_details_button_label': MessageLookupByLibrary.simpleMessage( + 'Episodeninformationen anzeigen'), + 'episode_filter_clear_filters_button_label': + MessageLookupByLibrary.simpleMessage('Filter zurücksetzen'), + 'episode_filter_no_episodes_title_description': + MessageLookupByLibrary.simpleMessage( + 'Dieser Podcast hat keine Episoden, die Ihren Suchkriterien und Filtern entsprechen'), + 'episode_filter_no_episodes_title_label': + MessageLookupByLibrary.simpleMessage('Keine Episoden Gefunden'), + 'episode_filter_none_label': + MessageLookupByLibrary.simpleMessage('Keiner'), + 'episode_filter_played_label': + 
MessageLookupByLibrary.simpleMessage('Gespielt'), + 'episode_filter_semantic_label': + MessageLookupByLibrary.simpleMessage('Episoden filtern'), + 'episode_filter_started_label': + MessageLookupByLibrary.simpleMessage('Gestartet'), + 'episode_filter_unplayed_label': + MessageLookupByLibrary.simpleMessage('Nicht gespielt'), + 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), + 'episode_sort_alphabetical_ascending_label': + MessageLookupByLibrary.simpleMessage('Alphabetisch von A bis Z'), + 'episode_sort_alphabetical_descending_label': + MessageLookupByLibrary.simpleMessage('Alphabetisch von Z bis A'), + 'episode_sort_earliest_first_label': + MessageLookupByLibrary.simpleMessage('Das Älteste zuerst'), + 'episode_sort_latest_first_label': + MessageLookupByLibrary.simpleMessage('Das Neueste zuerst'), + 'episode_sort_none_label': + MessageLookupByLibrary.simpleMessage('Standard'), + 'episode_sort_semantic_label': + MessageLookupByLibrary.simpleMessage('Episoden sortieren'), + 'error_no_connection': MessageLookupByLibrary.simpleMessage( + 'Episode kann nicht abgespielt werden. Überprüfen Sie bitte Ihre Verbindung und versuchen Sie es erneut.'), + 'error_playback_fail': MessageLookupByLibrary.simpleMessage( + 'Während der Wiedergabe ist ein unerwarteter Fehler aufgetreten. 
Überprüfen Sie bitte Ihre Verbindung und versuchen Sie es erneut.'), + 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage( + '30 Sekunden schneller Vorlauf'), + 'feedback_menu_item_label': + MessageLookupByLibrary.simpleMessage('Rückmeldung'), + 'go_back_button_label': + MessageLookupByLibrary.simpleMessage('Geh Zurück'), + 'label_opml_importing': + MessageLookupByLibrary.simpleMessage('Importieren'), + 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), + 'library': MessageLookupByLibrary.simpleMessage('Bibliothek'), + 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage( + 'Markieren Sie alle Folgen als nicht abgespielt'), + 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage( + 'Markieren Sie alle Episoden als abgespielt'), + 'mark_played_label': + MessageLookupByLibrary.simpleMessage('Markieren gespielt'), + 'mark_unplayed_label': + MessageLookupByLibrary.simpleMessage('Markieren nicht abgespielt'), + 'minimise_player_window_button_label': + MessageLookupByLibrary.simpleMessage( + 'Wiedergabebildschirm minimieren'), + 'more_label': MessageLookupByLibrary.simpleMessage('Mehr'), + 'new_episodes_label': + MessageLookupByLibrary.simpleMessage('Neue Folgen sind verfügbar'), + 'new_episodes_view_now_label': + MessageLookupByLibrary.simpleMessage('JETZT ANZEIGEN'), + 'no_downloads_message': MessageLookupByLibrary.simpleMessage( + 'Sie haben keine Episoden heruntergeladen'), + 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage( + 'Podcast-Episoden konnten nicht geladen werden. 
Bitte überprüfen Sie Ihre Verbindung.'), + 'no_search_results_message': + MessageLookupByLibrary.simpleMessage('Keine Podcasts gefunden'), + 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage( + 'Tippen Sie unten auf die Schaltfläche „Entdecken“ oder verwenden Sie die Suchleiste oben, um Ihren ersten Podcast zu finden'), + 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage( + 'Für diesen Podcast ist kein Transkript verfügbar'), + 'notes_label': MessageLookupByLibrary.simpleMessage('Notizen'), + 'now_playing_episode_position': + MessageLookupByLibrary.simpleMessage('Episodenposition'), + 'now_playing_episode_time_remaining': + MessageLookupByLibrary.simpleMessage('Verbleibende Zeit'), + 'now_playing_queue_label': + MessageLookupByLibrary.simpleMessage('Jetzt Spielen'), + 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), + 'open_show_website_label': + MessageLookupByLibrary.simpleMessage('Show-Website öffnen'), + 'opml_export_button_label': + MessageLookupByLibrary.simpleMessage('Export'), + 'opml_import_button_label': + MessageLookupByLibrary.simpleMessage('Importieren'), + 'opml_import_export_label': + MessageLookupByLibrary.simpleMessage('OPML Importieren/Export'), + 'pause_button_label': + MessageLookupByLibrary.simpleMessage('Folge pausieren'), + 'play_button_label': + MessageLookupByLibrary.simpleMessage('Folge abspielen'), + 'play_download_button_label': MessageLookupByLibrary.simpleMessage( + 'Heruntergeladene Episode abspielen'), + 'playback_speed_label': MessageLookupByLibrary.simpleMessage( + 'Stellen Sie die Wiedergabegeschwindigkeit ein'), + 'podcast_funding_dialog_header': + MessageLookupByLibrary.simpleMessage('Podcast-Finanzierung'), + 'podcast_options_overflow_menu_semantic_label': + MessageLookupByLibrary.simpleMessage('Optionsmenü'), + 'queue_add_label': MessageLookupByLibrary.simpleMessage('Addieren'), + 'queue_clear_button_label': + MessageLookupByLibrary.simpleMessage('Klar'), + 'queue_clear_label': 
MessageLookupByLibrary.simpleMessage( + 'Möchten Sie die Warteschlange wirklich löschen?'), + 'queue_clear_label_title': + MessageLookupByLibrary.simpleMessage('Warteschlange löschen'), + 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Entfernen'), + 'refresh_feed_label': MessageLookupByLibrary.simpleMessage( + 'Holen Sie sich neue Episoden'), + 'resume_button_label': + MessageLookupByLibrary.simpleMessage('Folge fortsetzen'), + 'rewind_button_label': + MessageLookupByLibrary.simpleMessage('10 Sekunden zurückspulen'), + 'scrim_episode_details_selector': + MessageLookupByLibrary.simpleMessage('Episodendetails schließen'), + 'scrim_episode_filter_selector': + MessageLookupByLibrary.simpleMessage('Episodenfilter schließen'), + 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage( + 'Episodensortierung schließen'), + 'scrim_layout_selector': + MessageLookupByLibrary.simpleMessage('Layout-Auswahl schließen'), + 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage( + 'Auswahl des Sleep-Timers schließen'), + 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage( + 'Auswahl der Wiedergabegeschwindigkeit schließen'), + 'search_back_button_label': + MessageLookupByLibrary.simpleMessage('Zurück'), + 'search_button_label': MessageLookupByLibrary.simpleMessage('Suche'), + 'search_episodes_label': + MessageLookupByLibrary.simpleMessage('Folgen suchen'), + 'search_for_podcasts_hint': + MessageLookupByLibrary.simpleMessage('Suche nach Podcasts'), + 'search_provider_label': + MessageLookupByLibrary.simpleMessage('Suchmaschine'), + 'search_transcript_label': + MessageLookupByLibrary.simpleMessage('Transkript suchen'), + 'semantic_announce_searching': + MessageLookupByLibrary.simpleMessage('Suchen, bitte warten.'), + 'semantic_chapter_link_label': + MessageLookupByLibrary.simpleMessage('Weblink zum Kapitel'), + 'semantic_current_chapter_label': + MessageLookupByLibrary.simpleMessage('Aktuelles Kapitel'), + 
'semantic_current_value_label': + MessageLookupByLibrary.simpleMessage('Aktueller Wert'), + 'semantic_playing_options_collapse_label': + MessageLookupByLibrary.simpleMessage( + 'Schließen Sie den Schieberegler für die Wiedergabeoptionen'), + 'semantic_playing_options_expand_label': + MessageLookupByLibrary.simpleMessage( + 'Öffnen Sie den Schieberegler für die Wiedergabeoptionen'), + 'semantic_podcast_artwork_label': + MessageLookupByLibrary.simpleMessage('Podcast-Artwork'), + 'semantics_add_to_queue': MessageLookupByLibrary.simpleMessage( + 'Fügen Sie die Episode zur Warteschlange hinzu'), + 'semantics_collapse_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Collapse Podcast Beschreibung'), + 'semantics_decrease_playback_speed': + MessageLookupByLibrary.simpleMessage( + 'Verringern Sie die Wiedergabegeschwindigkeit'), + 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage( + 'Episodenlistenelement. Zeigt Bild, Zusammenfassung und Hauptsteuerelemente.'), + 'semantics_episode_tile_collapsed_hint': + MessageLookupByLibrary.simpleMessage( + 'erweitern und weitere Details und zusätzliche Optionen anzeigen'), + 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage( + 'Episodenlistenelement. 
Beschreibung, Hauptsteuerelemente und zusätzliche Steuerelemente werden angezeigt.'), + 'semantics_episode_tile_expanded_hint': + MessageLookupByLibrary.simpleMessage( + 'Reduzieren und Zusammenfassung anzeigen, Download- und Wiedergabesteuerung'), + 'semantics_expand_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Erweitern Sie die Beschreibung der Podcast'), + 'semantics_increase_playback_speed': + MessageLookupByLibrary.simpleMessage( + 'Erhöhen Sie die Wiedergabegeschwindigkeit'), + 'semantics_layout_option_compact_grid': + MessageLookupByLibrary.simpleMessage('Kompaktes Rasterlayout'), + 'semantics_layout_option_grid': + MessageLookupByLibrary.simpleMessage('Gitterstruktur'), + 'semantics_layout_option_list': + MessageLookupByLibrary.simpleMessage('Listenlayout'), + 'semantics_main_player_header': + MessageLookupByLibrary.simpleMessage('Hauptfenster des Players'), + 'semantics_mark_episode_played': + MessageLookupByLibrary.simpleMessage('Mark Episode as played'), + 'semantics_mark_episode_unplayed': + MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), + 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage( + 'Mini-Player. Wischen Sie nach rechts, um die Schaltfläche „Wiedergabe/Pause“ anzuzeigen. 
Aktivieren, um das Hauptfenster des Players zu öffnen'), + 'semantics_play_pause_toggle': MessageLookupByLibrary.simpleMessage( + 'Umschalten zwischen Wiedergabe und Pause'), + 'semantics_podcast_details_header': + MessageLookupByLibrary.simpleMessage( + 'Podcast-Details und Episodenseite'), + 'semantics_remove_from_queue': MessageLookupByLibrary.simpleMessage( + 'Entfernen Sie die Episode aus der Warteschlange'), + 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage( + 'Vollbild-Player-Modus beim Episodenstart'), + 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage( + 'Folgen automatisch aktualisieren'), + 'settings_auto_update_episodes_10min': + MessageLookupByLibrary.simpleMessage( + '10 Minuten seit dem letzten Update'), + 'settings_auto_update_episodes_12hour': + MessageLookupByLibrary.simpleMessage( + '12 Stunden seit dem letzten Update'), + 'settings_auto_update_episodes_1hour': + MessageLookupByLibrary.simpleMessage( + '1 Stunde seit dem letzten Update'), + 'settings_auto_update_episodes_30min': + MessageLookupByLibrary.simpleMessage( + '30 Minuten seit dem letzten Update'), + 'settings_auto_update_episodes_3hour': + MessageLookupByLibrary.simpleMessage( + '3 Stunden seit dem letzten Update'), + 'settings_auto_update_episodes_6hour': + MessageLookupByLibrary.simpleMessage( + '6 Stunden seit dem letzten Update'), + 'settings_auto_update_episodes_always': + MessageLookupByLibrary.simpleMessage('Immer'), + 'settings_auto_update_episodes_heading': + MessageLookupByLibrary.simpleMessage( + 'Folgen in der Detailansicht aktualisieren, nachdem'), + 'settings_auto_update_episodes_never': + MessageLookupByLibrary.simpleMessage('Noch nie'), + 'settings_data_divider_label': + MessageLookupByLibrary.simpleMessage('DATEN'), + 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage( + 'Heruntergeladene Episoden nach dem Abspielen löschen'), + 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage( + 
'Episoden auf SD-Karte herunterladen'), + 'settings_download_switch_card': MessageLookupByLibrary.simpleMessage( + 'Neue Downloads werden auf der SD-Karte gespeichert. Bestehende Downloads bleiben im internen Speicher.'), + 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage( + 'Neue Downloads werden im internen Speicher gespeichert. Bestehende Downloads verbleiben auf der SD-Karte.'), + 'settings_download_switch_label': + MessageLookupByLibrary.simpleMessage('Speicherort ändern'), + 'settings_episodes_divider_label': + MessageLookupByLibrary.simpleMessage('EPISODEN'), + 'settings_export_opml': + MessageLookupByLibrary.simpleMessage('OPML exportieren'), + 'settings_import_opml': + MessageLookupByLibrary.simpleMessage('OPML importieren'), + 'settings_label': MessageLookupByLibrary.simpleMessage('Einstellungen'), + 'settings_mark_deleted_played_label': + MessageLookupByLibrary.simpleMessage( + 'Markieren Sie gelöschte Episoden als abgespielt'), + 'settings_personalisation_divider_label': + MessageLookupByLibrary.simpleMessage('PERSONALISIERUNG'), + 'settings_playback_divider_label': + MessageLookupByLibrary.simpleMessage('WIEDERGABE'), + 'settings_theme_switch_label': + MessageLookupByLibrary.simpleMessage('Dark theme'), + 'show_notes_label': + MessageLookupByLibrary.simpleMessage('Notizen anzeigen'), + 'sleep_episode_label': + MessageLookupByLibrary.simpleMessage('Ende der Folge'), + 'sleep_minute_label': m0, + 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Aus'), + 'sleep_timer_label': + MessageLookupByLibrary.simpleMessage('Sleep-Timer'), + 'stop_download_button_label': + MessageLookupByLibrary.simpleMessage('Halt'), + 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage( + 'Möchten Sie diesen Download wirklich beenden und die Episode löschen?'), + 'stop_download_title': + MessageLookupByLibrary.simpleMessage('Stop Download'), + 'subscribe_button_label': + MessageLookupByLibrary.simpleMessage('Folgen'), + 
'subscribe_label': MessageLookupByLibrary.simpleMessage('Folgen'), + 'transcript_label': MessageLookupByLibrary.simpleMessage('Transkript'), + 'transcript_why_not_label': + MessageLookupByLibrary.simpleMessage('Warum nicht?'), + 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage( + 'https://www.pinepods.online/docs/Features/Transcript'), + 'unsubscribe_button_label': + MessageLookupByLibrary.simpleMessage('Entfolgen'), + 'unsubscribe_label': + MessageLookupByLibrary.simpleMessage('Nicht mehr folgen'), + 'unsubscribe_message': MessageLookupByLibrary.simpleMessage( + 'Wenn Sie nicht mehr folgen, werden alle heruntergeladenen Folgen dieses Podcasts gelöscht.'), + 'up_next_queue_label': + MessageLookupByLibrary.simpleMessage('Als nächstes') + }; } diff --git a/mobile/lib/l10n/messages_en.dart b/mobile/lib/l10n/messages_en.dart index a8c36c8f..41d3dbb3 100644 --- a/mobile/lib/l10n/messages_en.dart +++ b/mobile/lib/l10n/messages_en.dart @@ -14,8 +14,7 @@ import 'package:intl/message_lookup_by_library.dart'; final messages = MessageLookup(); -typedef String? MessageIfAbsent( - String? messageStr, List? args); +typedef String? MessageIfAbsent(String? messageStr, List? 
args); class MessageLookup extends MessageLookupByLibrary { @override @@ -24,176 +23,328 @@ class MessageLookup extends MessageLookupByLibrary { static m0(minutes) => "${minutes} minutes"; @override - final Map messages = _notInlinedMessages(_notInlinedMessages); + final Map messages = + _notInlinedMessages(_notInlinedMessages); static Map _notInlinedMessages(_) => { - 'about_label': MessageLookupByLibrary.simpleMessage('About'), - 'add_rss_feed_option': MessageLookupByLibrary.simpleMessage('Add RSS Feed'), - 'app_title': MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), - 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), - 'audio_effect_trim_silence_label': MessageLookupByLibrary.simpleMessage('Trim Silence'), - 'audio_effect_volume_boost_label': MessageLookupByLibrary.simpleMessage('Volume Boost'), - 'audio_settings_playback_speed_label': MessageLookupByLibrary.simpleMessage('Playback Speed'), - 'auto_scroll_transcript_label': MessageLookupByLibrary.simpleMessage('Follow transcript'), - 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Cancel'), - 'cancel_download_button_label': MessageLookupByLibrary.simpleMessage('Cancel download'), - 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Cancel'), - 'chapters_label': MessageLookupByLibrary.simpleMessage('Chapters'), - 'clear_queue_button_label': MessageLookupByLibrary.simpleMessage('CLEAR QUEUE'), - 'clear_search_button_label': MessageLookupByLibrary.simpleMessage('Clear search text'), - 'close_button_label': MessageLookupByLibrary.simpleMessage('Close'), - 'consent_message': MessageLookupByLibrary.simpleMessage('This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by Anytime.'), - 'continue_button_label': MessageLookupByLibrary.simpleMessage('Continue'), - 'delete_button_label': MessageLookupByLibrary.simpleMessage('Delete'), - 'delete_episode_button_label': MessageLookupByLibrary.simpleMessage('Delete downloaded episode'), - 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage('Are you sure you wish to delete this episode?'), - 'delete_episode_title': MessageLookupByLibrary.simpleMessage('Delete Episode'), - 'delete_label': MessageLookupByLibrary.simpleMessage('Delete'), - 'discover': MessageLookupByLibrary.simpleMessage('Discover'), - 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage(',Arts,Business,Comedy,Education,Fiction,Government,Health & Fitness,History,Kids & Family,Leisure,Music,News,Religion & Spirituality,Science,Society & Culture,Sports,TV & Film,Technology,True Crime'), - 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage(',After-Shows,Alternative,Animals,Animation,Arts,Astronomy,Automotive,Aviation,Baseball,Basketball,Beauty,Books,Buddhism,Business,Careers,Chemistry,Christianity,Climate,Comedy,Commentary,Courses,Crafts,Cricket,Cryptocurrency,Culture,Daily,Design,Documentary,Drama,Earth,Education,Entertainment,Entrepreneurship,Family,Fantasy,Fashion,Fiction,Film,Fitness,Food,Football,Games,Garden,Golf,Government,Health,Hinduism,History,Hobbies,Hockey,Home,HowTo,Improv,Interviews,Investing,Islam,Journals,Judaism,Kids,Language,Learning,Leisure,Life,Management,Manga,Marketing,Mathematics,Medicine,Mental,Music,Natural,Nature,News,NonProfit,Nutrition,Parenting,Performing,Personal,Pets,Philosophy,Physics,Places,Politics,Relationships,Religion,Reviews,Role-Playing,Rugby,Running,Science,Self-Improvement,Sexuality,Soccer,Social,Society,Spirituality,Sports,Stand-Up,Stories,Swimming,TV,Tabletop,Technology,Tennis,Travel,True Crime,Video-Games,Visual,Volleyball,Weather,Wilderness,Wrestling'), - 
'download_episode_button_label': MessageLookupByLibrary.simpleMessage('Download episode'), - 'downloads': MessageLookupByLibrary.simpleMessage('Downloads'), - 'empty_queue_message': MessageLookupByLibrary.simpleMessage('Your queue is empty'), - 'episode_details_button_label': MessageLookupByLibrary.simpleMessage('Show episode information'), - 'episode_filter_clear_filters_button_label': MessageLookupByLibrary.simpleMessage('Clear Filters'), - 'episode_filter_no_episodes_title_description': MessageLookupByLibrary.simpleMessage('This podcast has no episodes matching your search criteria and filter'), - 'episode_filter_no_episodes_title_label': MessageLookupByLibrary.simpleMessage('No Episodes Found'), - 'episode_filter_none_label': MessageLookupByLibrary.simpleMessage('None'), - 'episode_filter_played_label': MessageLookupByLibrary.simpleMessage('Played'), - 'episode_filter_semantic_label': MessageLookupByLibrary.simpleMessage('Filter episodes'), - 'episode_filter_started_label': MessageLookupByLibrary.simpleMessage('Started'), - 'episode_filter_unplayed_label': MessageLookupByLibrary.simpleMessage('Unplayed'), - 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), - 'episode_sort_alphabetical_ascending_label': MessageLookupByLibrary.simpleMessage('Alphabetical A-Z'), - 'episode_sort_alphabetical_descending_label': MessageLookupByLibrary.simpleMessage('Alphabetical Z-A'), - 'episode_sort_earliest_first_label': MessageLookupByLibrary.simpleMessage('Earliest first'), - 'episode_sort_latest_first_label': MessageLookupByLibrary.simpleMessage('Latest first'), - 'episode_sort_none_label': MessageLookupByLibrary.simpleMessage('Default'), - 'episode_sort_semantic_label': MessageLookupByLibrary.simpleMessage('Sort episodes'), - 'error_no_connection': MessageLookupByLibrary.simpleMessage('Unable to play episode. 
Please check your connection and try again.'), - 'error_playback_fail': MessageLookupByLibrary.simpleMessage('An unexpected error occurred during playback. Please check your connection and try again.'), - 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage('Fast-forward episode 30 seconds'), - 'feedback_menu_item_label': MessageLookupByLibrary.simpleMessage('Feedback'), - 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Go Back'), - 'label_opml_importing': MessageLookupByLibrary.simpleMessage('Importing'), - 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), - 'library': MessageLookupByLibrary.simpleMessage('Library'), - 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage('Mark all episodes as not played'), - 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage('Mark all episodes as played'), - 'mark_played_label': MessageLookupByLibrary.simpleMessage('Mark Played'), - 'mark_unplayed_label': MessageLookupByLibrary.simpleMessage('Mark Unplayed'), - 'minimise_player_window_button_label': MessageLookupByLibrary.simpleMessage('Minimise player window'), - 'more_label': MessageLookupByLibrary.simpleMessage('More'), - 'new_episodes_label': MessageLookupByLibrary.simpleMessage('New episodes are available'), - 'new_episodes_view_now_label': MessageLookupByLibrary.simpleMessage('VIEW NOW'), - 'no_downloads_message': MessageLookupByLibrary.simpleMessage('You do not have any downloaded episodes'), - 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage('Could not load podcast episodes. 
Please check your connection.'), - 'no_search_results_message': MessageLookupByLibrary.simpleMessage('No podcasts found'), - 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage('Head to Settings to Connect a Pinepods Server if you haven\'t yet!'), - 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage('A transcript is not available for this podcast'), - 'notes_label': MessageLookupByLibrary.simpleMessage('Description'), - 'now_playing_episode_position': MessageLookupByLibrary.simpleMessage('Episode position'), - 'now_playing_episode_time_remaining': MessageLookupByLibrary.simpleMessage('Time remaining'), - 'now_playing_queue_label': MessageLookupByLibrary.simpleMessage('Now Playing'), - 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), - 'open_show_website_label': MessageLookupByLibrary.simpleMessage('Open show website'), - 'opml_export_button_label': MessageLookupByLibrary.simpleMessage('Export'), - 'opml_import_button_label': MessageLookupByLibrary.simpleMessage('Import'), - 'opml_import_export_label': MessageLookupByLibrary.simpleMessage('OPML Import/Export'), - 'pause_button_label': MessageLookupByLibrary.simpleMessage('Pause episode'), - 'play_button_label': MessageLookupByLibrary.simpleMessage('Play episode'), - 'play_download_button_label': MessageLookupByLibrary.simpleMessage('Play downloaded episode'), - 'playback_speed_label': MessageLookupByLibrary.simpleMessage('Playback speed'), - 'podcast_funding_dialog_header': MessageLookupByLibrary.simpleMessage('Podcast Funding'), - 'podcast_options_overflow_menu_semantic_label': MessageLookupByLibrary.simpleMessage('Options menu'), - 'queue_add_label': MessageLookupByLibrary.simpleMessage('Add'), - 'queue_clear_button_label': MessageLookupByLibrary.simpleMessage('Clear'), - 'queue_clear_label': MessageLookupByLibrary.simpleMessage('Are you sure you wish to clear the queue?'), - 'queue_clear_label_title': MessageLookupByLibrary.simpleMessage('Clear Queue'), - 
'queue_remove_label': MessageLookupByLibrary.simpleMessage('Remove'), - 'refresh_feed_label': MessageLookupByLibrary.simpleMessage('Refresh episodes'), - 'resume_button_label': MessageLookupByLibrary.simpleMessage('Resume episode'), - 'rewind_button_label': MessageLookupByLibrary.simpleMessage('Rewind episode 10 seconds'), - 'scrim_episode_details_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode details'), - 'scrim_episode_filter_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode filter'), - 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode sort'), - 'scrim_layout_selector': MessageLookupByLibrary.simpleMessage('Dismiss layout selector'), - 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage('Dismiss sleep timer selector'), - 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage('Dismiss playback speed selector'), - 'search_back_button_label': MessageLookupByLibrary.simpleMessage('Back'), - 'search_button_label': MessageLookupByLibrary.simpleMessage('Search'), - 'search_episodes_label': MessageLookupByLibrary.simpleMessage('Search episodes'), - 'search_for_podcasts_hint': MessageLookupByLibrary.simpleMessage('Search for podcasts'), - 'search_provider_label': MessageLookupByLibrary.simpleMessage('Search provider'), - 'search_transcript_label': MessageLookupByLibrary.simpleMessage('Search transcript'), - 'semantic_announce_searching': MessageLookupByLibrary.simpleMessage('Searching, please wait.'), - 'semantic_chapter_link_label': MessageLookupByLibrary.simpleMessage('Chapter web link'), - 'semantic_current_chapter_label': MessageLookupByLibrary.simpleMessage('Current chapter'), - 'semantic_current_value_label': MessageLookupByLibrary.simpleMessage('Current value'), - 'semantic_playing_options_collapse_label': MessageLookupByLibrary.simpleMessage('Close playing options slider'), - 'semantic_playing_options_expand_label': MessageLookupByLibrary.simpleMessage('Open playing options 
slider'), - 'semantic_podcast_artwork_label': MessageLookupByLibrary.simpleMessage('Podcast artwork'), - 'semantics_add_to_queue': MessageLookupByLibrary.simpleMessage('Add episode to queue'), - 'semantics_collapse_podcast_description': MessageLookupByLibrary.simpleMessage('Collapse podcast description'), - 'semantics_decrease_playback_speed': MessageLookupByLibrary.simpleMessage('Decrease playback speed'), - 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage('Episode list item. Showing image, summary and main controls.'), - 'semantics_episode_tile_collapsed_hint': MessageLookupByLibrary.simpleMessage('expand and show more details and additional options'), - 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage('Episode list item. Showing description, main controls and additional controls.'), - 'semantics_episode_tile_expanded_hint': MessageLookupByLibrary.simpleMessage('collapse and show summary, download and play control'), - 'semantics_expand_podcast_description': MessageLookupByLibrary.simpleMessage('Expand podcast description'), - 'semantics_increase_playback_speed': MessageLookupByLibrary.simpleMessage('Increase playback speed'), - 'semantics_layout_option_compact_grid': MessageLookupByLibrary.simpleMessage('Compact grid layout'), - 'semantics_layout_option_grid': MessageLookupByLibrary.simpleMessage('Grid layout'), - 'semantics_layout_option_list': MessageLookupByLibrary.simpleMessage('List layout'), - 'semantics_main_player_header': MessageLookupByLibrary.simpleMessage('Main player window'), - 'semantics_mark_episode_played': MessageLookupByLibrary.simpleMessage('Mark Episode as played'), - 'semantics_mark_episode_unplayed': MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), - 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage('Mini player. Swipe right to play/pause button. 
Activate to open main player window'), - 'semantics_play_pause_toggle': MessageLookupByLibrary.simpleMessage('Play/pause toggle'), - 'semantics_podcast_details_header': MessageLookupByLibrary.simpleMessage('Podcast details and episodes page'), - 'semantics_remove_from_queue': MessageLookupByLibrary.simpleMessage('Remove episode from queue'), - 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage('Full screen player mode on episode start'), - 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage('Auto update episodes'), - 'settings_auto_update_episodes_10min': MessageLookupByLibrary.simpleMessage('10 minutes since last update'), - 'settings_auto_update_episodes_12hour': MessageLookupByLibrary.simpleMessage('12 hours since last update'), - 'settings_auto_update_episodes_1hour': MessageLookupByLibrary.simpleMessage('1 hour since last update'), - 'settings_auto_update_episodes_30min': MessageLookupByLibrary.simpleMessage('30 minutes since last update'), - 'settings_auto_update_episodes_3hour': MessageLookupByLibrary.simpleMessage('3 hours since last update'), - 'settings_auto_update_episodes_6hour': MessageLookupByLibrary.simpleMessage('6 hours since last update'), - 'settings_auto_update_episodes_always': MessageLookupByLibrary.simpleMessage('Always'), - 'settings_auto_update_episodes_heading': MessageLookupByLibrary.simpleMessage('Refresh episodes on details screen after'), - 'settings_auto_update_episodes_never': MessageLookupByLibrary.simpleMessage('Never'), - 'settings_data_divider_label': MessageLookupByLibrary.simpleMessage('DATA'), - 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage('Delete downloaded episodes once played'), - 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage('Download episodes to SD card'), - 'settings_download_switch_card': MessageLookupByLibrary.simpleMessage('New downloads will be saved to the SD card. 
Existing downloads will remain on internal storage.'), - 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage('New downloads will be saved to internal storage. Existing downloads will remain on the SD card.'), - 'settings_download_switch_label': MessageLookupByLibrary.simpleMessage('Change storage location'), - 'settings_episodes_divider_label': MessageLookupByLibrary.simpleMessage('EPISODES'), - 'settings_export_opml': MessageLookupByLibrary.simpleMessage('Export OPML'), - 'settings_import_opml': MessageLookupByLibrary.simpleMessage('Import OPML'), - 'settings_label': MessageLookupByLibrary.simpleMessage('Settings'), - 'settings_mark_deleted_played_label': MessageLookupByLibrary.simpleMessage('Mark deleted episodes as played'), - 'settings_personalisation_divider_label': MessageLookupByLibrary.simpleMessage('PERSONALISATION'), - 'settings_playback_divider_label': MessageLookupByLibrary.simpleMessage('PLAYBACK'), - 'settings_theme_switch_label': MessageLookupByLibrary.simpleMessage('Dark theme'), - 'show_notes_label': MessageLookupByLibrary.simpleMessage('Show notes'), - 'sleep_episode_label': MessageLookupByLibrary.simpleMessage('End of episode'), - 'sleep_minute_label': m0, - 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), - 'sleep_timer_label': MessageLookupByLibrary.simpleMessage('Sleep Timer'), - 'stop_download_button_label': MessageLookupByLibrary.simpleMessage('Stop'), - 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage('Are you sure you wish to stop this download and delete the episode?'), - 'stop_download_title': MessageLookupByLibrary.simpleMessage('Stop Download'), - 'subscribe_button_label': MessageLookupByLibrary.simpleMessage('Follow'), - 'subscribe_label': MessageLookupByLibrary.simpleMessage('Follow'), - 'transcript_label': MessageLookupByLibrary.simpleMessage('Transcript'), - 'transcript_why_not_label': MessageLookupByLibrary.simpleMessage('Why not?'), - 'transcript_why_not_url': 
MessageLookupByLibrary.simpleMessage('https://anytimeplayer.app/docs/anytime_transcript_support_en.html'), - 'unsubscribe_button_label': MessageLookupByLibrary.simpleMessage('Unfollow'), - 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Unfollow'), - 'unsubscribe_message': MessageLookupByLibrary.simpleMessage('Unfollowing will delete all downloaded episodes of this podcast.'), - 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Up Next') - }; + 'about_label': MessageLookupByLibrary.simpleMessage('About'), + 'add_rss_feed_option': + MessageLookupByLibrary.simpleMessage('Add RSS Feed'), + 'app_title': + MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), + 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), + 'audio_effect_trim_silence_label': + MessageLookupByLibrary.simpleMessage('Trim Silence'), + 'audio_effect_volume_boost_label': + MessageLookupByLibrary.simpleMessage('Volume Boost'), + 'audio_settings_playback_speed_label': + MessageLookupByLibrary.simpleMessage('Playback Speed'), + 'auto_scroll_transcript_label': + MessageLookupByLibrary.simpleMessage('Follow transcript'), + 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Cancel'), + 'cancel_download_button_label': + MessageLookupByLibrary.simpleMessage('Cancel download'), + 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Cancel'), + 'chapters_label': MessageLookupByLibrary.simpleMessage('Chapters'), + 'clear_queue_button_label': + MessageLookupByLibrary.simpleMessage('CLEAR QUEUE'), + 'clear_search_button_label': + MessageLookupByLibrary.simpleMessage('Clear search text'), + 'close_button_label': MessageLookupByLibrary.simpleMessage('Close'), + 'consent_message': MessageLookupByLibrary.simpleMessage( + 'This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by Pinepods.'), + 'continue_button_label': + MessageLookupByLibrary.simpleMessage('Continue'), + 'delete_button_label': MessageLookupByLibrary.simpleMessage('Delete'), + 'delete_episode_button_label': + MessageLookupByLibrary.simpleMessage('Delete downloaded episode'), + 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to delete this episode?'), + 'delete_episode_title': + MessageLookupByLibrary.simpleMessage('Delete Episode'), + 'delete_label': MessageLookupByLibrary.simpleMessage('Delete'), + 'discover': MessageLookupByLibrary.simpleMessage('Discover'), + 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage( + ',Arts,Business,Comedy,Education,Fiction,Government,Health & Fitness,History,Kids & Family,Leisure,Music,News,Religion & Spirituality,Science,Society & Culture,Sports,TV & Film,Technology,True Crime'), + 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage( + ',After-Shows,Alternative,Animals,Animation,Arts,Astronomy,Automotive,Aviation,Baseball,Basketball,Beauty,Books,Buddhism,Business,Careers,Chemistry,Christianity,Climate,Comedy,Commentary,Courses,Crafts,Cricket,Cryptocurrency,Culture,Daily,Design,Documentary,Drama,Earth,Education,Entertainment,Entrepreneurship,Family,Fantasy,Fashion,Fiction,Film,Fitness,Food,Football,Games,Garden,Golf,Government,Health,Hinduism,History,Hobbies,Hockey,Home,HowTo,Improv,Interviews,Investing,Islam,Journals,Judaism,Kids,Language,Learning,Leisure,Life,Management,Manga,Marketing,Mathematics,Medicine,Mental,Music,Natural,Nature,News,NonProfit,Nutrition,Parenting,Performing,Personal,Pets,Philosophy,Physics,Places,Politics,Relationships,Religion,Reviews,Role-Playing,Rugby,Running,Science,Self-Improvement,Sexuality,Soccer,Social,Society,Spirituality,Sports,Stand-Up,Stories,Swimming,TV,Tabletop,Technology,Tennis,Travel,True Crime,Video-Games,Visual,Volleyball,Weather,Wilderness,Wrestling'), + 
'download_episode_button_label': + MessageLookupByLibrary.simpleMessage('Download episode'), + 'downloads': MessageLookupByLibrary.simpleMessage('Downloads'), + 'empty_queue_message': + MessageLookupByLibrary.simpleMessage('Your queue is empty'), + 'episode_details_button_label': + MessageLookupByLibrary.simpleMessage('Show episode information'), + 'episode_filter_clear_filters_button_label': + MessageLookupByLibrary.simpleMessage('Clear Filters'), + 'episode_filter_no_episodes_title_description': + MessageLookupByLibrary.simpleMessage( + 'This podcast has no episodes matching your search criteria and filter'), + 'episode_filter_no_episodes_title_label': + MessageLookupByLibrary.simpleMessage('No Episodes Found'), + 'episode_filter_none_label': + MessageLookupByLibrary.simpleMessage('None'), + 'episode_filter_played_label': + MessageLookupByLibrary.simpleMessage('Played'), + 'episode_filter_semantic_label': + MessageLookupByLibrary.simpleMessage('Filter episodes'), + 'episode_filter_started_label': + MessageLookupByLibrary.simpleMessage('Started'), + 'episode_filter_unplayed_label': + MessageLookupByLibrary.simpleMessage('Unplayed'), + 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), + 'episode_sort_alphabetical_ascending_label': + MessageLookupByLibrary.simpleMessage('Alphabetical A-Z'), + 'episode_sort_alphabetical_descending_label': + MessageLookupByLibrary.simpleMessage('Alphabetical Z-A'), + 'episode_sort_earliest_first_label': + MessageLookupByLibrary.simpleMessage('Earliest first'), + 'episode_sort_latest_first_label': + MessageLookupByLibrary.simpleMessage('Latest first'), + 'episode_sort_none_label': + MessageLookupByLibrary.simpleMessage('Default'), + 'episode_sort_semantic_label': + MessageLookupByLibrary.simpleMessage('Sort episodes'), + 'error_no_connection': MessageLookupByLibrary.simpleMessage( + 'Unable to play episode. 
Please check your connection and try again.'), + 'error_playback_fail': MessageLookupByLibrary.simpleMessage( + 'An unexpected error occurred during playback. Please check your connection and try again.'), + 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage( + 'Fast-forward episode 30 seconds'), + 'feedback_menu_item_label': + MessageLookupByLibrary.simpleMessage('Feedback'), + 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Go Back'), + 'label_opml_importing': + MessageLookupByLibrary.simpleMessage('Importing'), + 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), + 'library': MessageLookupByLibrary.simpleMessage('Library'), + 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage( + 'Mark all episodes as not played'), + 'mark_episodes_played_label': + MessageLookupByLibrary.simpleMessage('Mark all episodes as played'), + 'mark_played_label': + MessageLookupByLibrary.simpleMessage('Mark Played'), + 'mark_unplayed_label': + MessageLookupByLibrary.simpleMessage('Mark Unplayed'), + 'minimise_player_window_button_label': + MessageLookupByLibrary.simpleMessage('Minimise player window'), + 'more_label': MessageLookupByLibrary.simpleMessage('More'), + 'new_episodes_label': + MessageLookupByLibrary.simpleMessage('New episodes are available'), + 'new_episodes_view_now_label': + MessageLookupByLibrary.simpleMessage('VIEW NOW'), + 'no_downloads_message': MessageLookupByLibrary.simpleMessage( + 'You do not have any downloaded episodes'), + 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage( + 'Could not load podcast episodes. 
Please check your connection.'), + 'no_search_results_message': + MessageLookupByLibrary.simpleMessage('No podcasts found'), + 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage( + 'Head to Settings to Connect a Pinepods Server if you haven\'t yet!'), + 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage( + 'A transcript is not available for this podcast'), + 'notes_label': MessageLookupByLibrary.simpleMessage('Description'), + 'now_playing_episode_position': + MessageLookupByLibrary.simpleMessage('Episode position'), + 'now_playing_episode_time_remaining': + MessageLookupByLibrary.simpleMessage('Time remaining'), + 'now_playing_queue_label': + MessageLookupByLibrary.simpleMessage('Now Playing'), + 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), + 'open_show_website_label': + MessageLookupByLibrary.simpleMessage('Open show website'), + 'opml_export_button_label': + MessageLookupByLibrary.simpleMessage('Export'), + 'opml_import_button_label': + MessageLookupByLibrary.simpleMessage('Import'), + 'opml_import_export_label': + MessageLookupByLibrary.simpleMessage('OPML Import/Export'), + 'pause_button_label': + MessageLookupByLibrary.simpleMessage('Pause episode'), + 'play_button_label': + MessageLookupByLibrary.simpleMessage('Play episode'), + 'play_download_button_label': + MessageLookupByLibrary.simpleMessage('Play downloaded episode'), + 'playback_speed_label': + MessageLookupByLibrary.simpleMessage('Playback speed'), + 'podcast_funding_dialog_header': + MessageLookupByLibrary.simpleMessage('Podcast Funding'), + 'podcast_options_overflow_menu_semantic_label': + MessageLookupByLibrary.simpleMessage('Options menu'), + 'queue_add_label': MessageLookupByLibrary.simpleMessage('Add'), + 'queue_clear_button_label': + MessageLookupByLibrary.simpleMessage('Clear'), + 'queue_clear_label': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to clear the queue?'), + 'queue_clear_label_title': + 
MessageLookupByLibrary.simpleMessage('Clear Queue'), + 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Remove'), + 'refresh_feed_label': + MessageLookupByLibrary.simpleMessage('Refresh episodes'), + 'resume_button_label': + MessageLookupByLibrary.simpleMessage('Resume episode'), + 'rewind_button_label': + MessageLookupByLibrary.simpleMessage('Rewind episode 10 seconds'), + 'scrim_episode_details_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode details'), + 'scrim_episode_filter_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode filter'), + 'scrim_episode_sort_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode sort'), + 'scrim_layout_selector': + MessageLookupByLibrary.simpleMessage('Dismiss layout selector'), + 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage( + 'Dismiss sleep timer selector'), + 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage( + 'Dismiss playback speed selector'), + 'search_back_button_label': + MessageLookupByLibrary.simpleMessage('Back'), + 'search_button_label': MessageLookupByLibrary.simpleMessage('Search'), + 'search_episodes_label': + MessageLookupByLibrary.simpleMessage('Search episodes'), + 'search_for_podcasts_hint': + MessageLookupByLibrary.simpleMessage('Search for podcasts'), + 'search_provider_label': + MessageLookupByLibrary.simpleMessage('Search provider'), + 'search_transcript_label': + MessageLookupByLibrary.simpleMessage('Search transcript'), + 'semantic_announce_searching': + MessageLookupByLibrary.simpleMessage('Searching, please wait.'), + 'semantic_chapter_link_label': + MessageLookupByLibrary.simpleMessage('Chapter web link'), + 'semantic_current_chapter_label': + MessageLookupByLibrary.simpleMessage('Current chapter'), + 'semantic_current_value_label': + MessageLookupByLibrary.simpleMessage('Current value'), + 'semantic_playing_options_collapse_label': + MessageLookupByLibrary.simpleMessage( + 'Close playing options slider'), + 
'semantic_playing_options_expand_label': + MessageLookupByLibrary.simpleMessage('Open playing options slider'), + 'semantic_podcast_artwork_label': + MessageLookupByLibrary.simpleMessage('Podcast artwork'), + 'semantics_add_to_queue': + MessageLookupByLibrary.simpleMessage('Add episode to queue'), + 'semantics_collapse_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Collapse podcast description'), + 'semantics_decrease_playback_speed': + MessageLookupByLibrary.simpleMessage('Decrease playback speed'), + 'semantics_episode_tile_collapsed': + MessageLookupByLibrary.simpleMessage( + 'Episode list item. Showing image, summary and main controls.'), + 'semantics_episode_tile_collapsed_hint': + MessageLookupByLibrary.simpleMessage( + 'expand and show more details and additional options'), + 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage( + 'Episode list item. Showing description, main controls and additional controls.'), + 'semantics_episode_tile_expanded_hint': + MessageLookupByLibrary.simpleMessage( + 'collapse and show summary, download and play control'), + 'semantics_expand_podcast_description': + MessageLookupByLibrary.simpleMessage('Expand podcast description'), + 'semantics_increase_playback_speed': + MessageLookupByLibrary.simpleMessage('Increase playback speed'), + 'semantics_layout_option_compact_grid': + MessageLookupByLibrary.simpleMessage('Compact grid layout'), + 'semantics_layout_option_grid': + MessageLookupByLibrary.simpleMessage('Grid layout'), + 'semantics_layout_option_list': + MessageLookupByLibrary.simpleMessage('List layout'), + 'semantics_main_player_header': + MessageLookupByLibrary.simpleMessage('Main player window'), + 'semantics_mark_episode_played': + MessageLookupByLibrary.simpleMessage('Mark Episode as played'), + 'semantics_mark_episode_unplayed': + MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), + 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage( + 'Mini 
player. Swipe right to play/pause button. Activate to open main player window'), + 'semantics_play_pause_toggle': + MessageLookupByLibrary.simpleMessage('Play/pause toggle'), + 'semantics_podcast_details_header': + MessageLookupByLibrary.simpleMessage( + 'Podcast details and episodes page'), + 'semantics_remove_from_queue': + MessageLookupByLibrary.simpleMessage('Remove episode from queue'), + 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage( + 'Full screen player mode on episode start'), + 'settings_auto_update_episodes': + MessageLookupByLibrary.simpleMessage('Auto update episodes'), + 'settings_auto_update_episodes_10min': + MessageLookupByLibrary.simpleMessage( + '10 minutes since last update'), + 'settings_auto_update_episodes_12hour': + MessageLookupByLibrary.simpleMessage('12 hours since last update'), + 'settings_auto_update_episodes_1hour': + MessageLookupByLibrary.simpleMessage('1 hour since last update'), + 'settings_auto_update_episodes_30min': + MessageLookupByLibrary.simpleMessage( + '30 minutes since last update'), + 'settings_auto_update_episodes_3hour': + MessageLookupByLibrary.simpleMessage('3 hours since last update'), + 'settings_auto_update_episodes_6hour': + MessageLookupByLibrary.simpleMessage('6 hours since last update'), + 'settings_auto_update_episodes_always': + MessageLookupByLibrary.simpleMessage('Always'), + 'settings_auto_update_episodes_heading': + MessageLookupByLibrary.simpleMessage( + 'Refresh episodes on details screen after'), + 'settings_auto_update_episodes_never': + MessageLookupByLibrary.simpleMessage('Never'), + 'settings_data_divider_label': + MessageLookupByLibrary.simpleMessage('DATA'), + 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage( + 'Delete downloaded episodes once played'), + 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage( + 'Download episodes to SD card'), + 'settings_download_switch_card': MessageLookupByLibrary.simpleMessage( + 'New downloads 
will be saved to the SD card. Existing downloads will remain on internal storage.'), + 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage( + 'New downloads will be saved to internal storage. Existing downloads will remain on the SD card.'), + 'settings_download_switch_label': + MessageLookupByLibrary.simpleMessage('Change storage location'), + 'settings_episodes_divider_label': + MessageLookupByLibrary.simpleMessage('EPISODES'), + 'settings_export_opml': + MessageLookupByLibrary.simpleMessage('Export OPML'), + 'settings_import_opml': + MessageLookupByLibrary.simpleMessage('Import OPML'), + 'settings_label': MessageLookupByLibrary.simpleMessage('Settings'), + 'settings_mark_deleted_played_label': + MessageLookupByLibrary.simpleMessage( + 'Mark deleted episodes as played'), + 'settings_personalisation_divider_label': + MessageLookupByLibrary.simpleMessage('Personalisation'), + 'settings_playback_divider_label': + MessageLookupByLibrary.simpleMessage('Playback'), + 'settings_theme_switch_label': + MessageLookupByLibrary.simpleMessage('Dark theme'), + 'show_notes_label': MessageLookupByLibrary.simpleMessage('Show notes'), + 'sleep_episode_label': + MessageLookupByLibrary.simpleMessage('End of episode'), + 'sleep_minute_label': m0, + 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), + 'sleep_timer_label': + MessageLookupByLibrary.simpleMessage('Sleep Timer'), + 'stop_download_button_label': + MessageLookupByLibrary.simpleMessage('Stop'), + 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to stop this download and delete the episode?'), + 'stop_download_title': + MessageLookupByLibrary.simpleMessage('Stop Download'), + 'subscribe_button_label': + MessageLookupByLibrary.simpleMessage('Follow'), + 'subscribe_label': MessageLookupByLibrary.simpleMessage('Follow'), + 'transcript_label': MessageLookupByLibrary.simpleMessage('Transcript'), + 'transcript_why_not_label': + 
MessageLookupByLibrary.simpleMessage('Why not?'), + 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage( + 'https://www.pinepods.online/docs/Features/Transcript'), + 'unsubscribe_button_label': + MessageLookupByLibrary.simpleMessage('Unfollow'), + 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Unfollow'), + 'unsubscribe_message': MessageLookupByLibrary.simpleMessage( + 'Unfollowing will delete all downloaded episodes of this podcast.'), + 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Up Next') + }; } diff --git a/mobile/lib/l10n/messages_it.dart b/mobile/lib/l10n/messages_it.dart index 176309b6..209fc1bb 100644 --- a/mobile/lib/l10n/messages_it.dart +++ b/mobile/lib/l10n/messages_it.dart @@ -14,8 +14,7 @@ import 'package:intl/message_lookup_by_library.dart'; final messages = MessageLookup(); -typedef String? MessageIfAbsent( - String? messageStr, List? args); +typedef String? MessageIfAbsent(String? messageStr, List? args); class MessageLookup extends MessageLookupByLibrary { @override @@ -24,176 +23,337 @@ class MessageLookup extends MessageLookupByLibrary { static m0(minutes) => "${minutes} minuti"; @override - final Map messages = _notInlinedMessages(_notInlinedMessages); + final Map messages = + _notInlinedMessages(_notInlinedMessages); static Map _notInlinedMessages(_) => { - 'about_label': MessageLookupByLibrary.simpleMessage('Info'), - 'add_rss_feed_option': MessageLookupByLibrary.simpleMessage('Aggiungi un Feed RSS'), - 'app_title': MessageLookupByLibrary.simpleMessage('Anytime Podcast Player'), - 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), - 'audio_effect_trim_silence_label': MessageLookupByLibrary.simpleMessage('Rimuovi Silenzio'), - 'audio_effect_volume_boost_label': MessageLookupByLibrary.simpleMessage('Incrementa Volume'), - 'audio_settings_playback_speed_label': MessageLookupByLibrary.simpleMessage('Velocità Riproduzione'), - 'auto_scroll_transcript_label': 
MessageLookupByLibrary.simpleMessage('Trascrizione sincronizzata'), - 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Annulla'), - 'cancel_download_button_label': MessageLookupByLibrary.simpleMessage('Annulla il download'), - 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Annulla'), - 'chapters_label': MessageLookupByLibrary.simpleMessage('Capitoli'), - 'clear_queue_button_label': MessageLookupByLibrary.simpleMessage('PULISCI CODA'), - 'clear_search_button_label': MessageLookupByLibrary.simpleMessage('Pulisci il campo di ricerca'), - 'close_button_label': MessageLookupByLibrary.simpleMessage('Chiudi'), - 'consent_message': MessageLookupByLibrary.simpleMessage('Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. I link sono forniti dagli autori del podcast e non sono verificati da Anytime.'), - 'continue_button_label': MessageLookupByLibrary.simpleMessage('Continua'), - 'delete_button_label': MessageLookupByLibrary.simpleMessage('Elimina'), - 'delete_episode_button_label': MessageLookupByLibrary.simpleMessage('Elimina episodio scaricato'), - 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage('Sicura/o di voler eliminare questo episodio?'), - 'delete_episode_title': MessageLookupByLibrary.simpleMessage('Elimina Episodio'), - 'delete_label': MessageLookupByLibrary.simpleMessage('Elimina'), - 'discover': MessageLookupByLibrary.simpleMessage('Scopri'), - 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage(',Arte,Business,Commedia,Educazione,Fiction,Governativi,Salute e Benessere,Storia,Bambini e Famiglia,Tempo Libero,Musica,Notizie,Religione e Spiritualità,Scienza,Società e Cultura,Sport,TV e Film,Tecnologia,True Crime'), - 'discovery_categories_pindex': 
MessageLookupByLibrary.simpleMessage(',Dopo-Spettacolo,Alternativi,Animali,Animazione,Arte,Astronomia,Automotive,Aviazione,Baseball,Pallacanestro,Bellezza,Libri,Buddismo,Business,Carriera,Chimica,Cristianità,Clima,Commedia,Commenti,Corsi,Artigianato,Cricket,Cryptocurrency,Cultura,Giornalieri,Design,Documentari,Dramma,Terra,Educazione,Intrattenimento,Imprenditoria,Famiglia,Fantasy,Fashion,Fiction,Film,Fitness,Cibo,Football,Giochi,Giardinaggio,Golf,Governativi,Salute,Induismo,Storia,Hobbies,Hockey,Casa,Come Fare,Improvvisazione,Interviste,Investimenti,Islam,Giornalismo,Giudaismo,Bambini,Lingue,Apprendimento,Tempo-Libero,Stili di Vita,Gestione,Manga,Marketing,Matematica,Medicina,Mentale,Musica,Naturale,Natura,Notizie,NonProfit,Nutrizione,Genitorialità,Esecuzione,Personale,Animali-Domestici,Filosofia,Fisica,Posti,Politica,Relazioni,Religione,Recensioni,Giochi-di-Ruolo,Rugby,Corsa,Scienza,Miglioramento-Personale,Sessualità,Calcio,Social,Società,Spiritualità,Sports,Stand-Up,Storie,Nuoto,TV,Tabletop,Tecnologia,Tennis,Viaggi,True Crime,Video-Giochi,Visivo,Pallavolo,Meteo,Natura-Selvaggia,Wrestling'), - 'download_episode_button_label': MessageLookupByLibrary.simpleMessage('Scarica episodio'), - 'downloads': MessageLookupByLibrary.simpleMessage('Scaricati'), - 'empty_queue_message': MessageLookupByLibrary.simpleMessage('La tua coda è vuota'), - 'episode_details_button_label': MessageLookupByLibrary.simpleMessage('Mostra le informazioni sull\'episodio'), - 'episode_filter_clear_filters_button_label': MessageLookupByLibrary.simpleMessage('Pulisci i Filtri'), - 'episode_filter_no_episodes_title_description': MessageLookupByLibrary.simpleMessage('Questo podcast non ha episodi che corrispondono ai tuoi criteri di ricerca e filtro'), - 'episode_filter_no_episodes_title_label': MessageLookupByLibrary.simpleMessage('Nessun episodio trovato'), - 'episode_filter_none_label': MessageLookupByLibrary.simpleMessage('Nessuno'), - 'episode_filter_played_label': 
MessageLookupByLibrary.simpleMessage('Riprodotto'), - 'episode_filter_semantic_label': MessageLookupByLibrary.simpleMessage('Filtra gli episodi'), - 'episode_filter_started_label': MessageLookupByLibrary.simpleMessage('Avviato'), - 'episode_filter_unplayed_label': MessageLookupByLibrary.simpleMessage('Non riprodotto'), - 'episode_label': MessageLookupByLibrary.simpleMessage('Episodio'), - 'episode_sort_alphabetical_ascending_label': MessageLookupByLibrary.simpleMessage('Ordine Alfabetico A-Z'), - 'episode_sort_alphabetical_descending_label': MessageLookupByLibrary.simpleMessage('Ordine Alfabetico Z-A'), - 'episode_sort_earliest_first_label': MessageLookupByLibrary.simpleMessage('I più vecchi'), - 'episode_sort_latest_first_label': MessageLookupByLibrary.simpleMessage('Gli ultimi'), - 'episode_sort_none_label': MessageLookupByLibrary.simpleMessage('Default'), - 'episode_sort_semantic_label': MessageLookupByLibrary.simpleMessage('Ordina gli episodi'), - 'error_no_connection': MessageLookupByLibrary.simpleMessage('Impossibile riprodurre l\'episodio. Per favore, verifica la tua connessione e prova di nuovo.'), - 'error_playback_fail': MessageLookupByLibrary.simpleMessage('Sì è verificato un errore inatteso durante la riproduzione. 
Per favore, verifica la tua connessione e prova di nuovo.'), - 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage('Manda avanti di 30 secondi'), - 'feedback_menu_item_label': MessageLookupByLibrary.simpleMessage('Feedback'), - 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Torna indietro'), - 'label_opml_importing': MessageLookupByLibrary.simpleMessage('Importazione in corso'), - 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), - 'library': MessageLookupByLibrary.simpleMessage('Libreria'), - 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage('Marca tutti gli episodi come non riprodotti'), - 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage('Marca tutti gli episodi come riprodotti'), - 'mark_played_label': MessageLookupByLibrary.simpleMessage('Marca Riprodotto'), - 'mark_unplayed_label': MessageLookupByLibrary.simpleMessage('Marca da Riprodurre'), - 'minimise_player_window_button_label': MessageLookupByLibrary.simpleMessage('Minimizza la finestra del player'), - 'more_label': MessageLookupByLibrary.simpleMessage('Di Più'), - 'new_episodes_label': MessageLookupByLibrary.simpleMessage('Nuovi episodi sono disponibili'), - 'new_episodes_view_now_label': MessageLookupByLibrary.simpleMessage('VEDI ORA'), - 'no_downloads_message': MessageLookupByLibrary.simpleMessage('Non hai nessun episodio scaricato'), - 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage('Non è possibile caricare gli episodi. 
Verifica la tua connessione, per favore.'), - 'no_search_results_message': MessageLookupByLibrary.simpleMessage('Nessun podcast trovato'), - 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage('Tappa il pulsante di ricerca sottostante o usa la barra di ricerca per trovare il tuo primo podcast'), - 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage('Nessuna trascrizione disponibile per questo podcast'), - 'notes_label': MessageLookupByLibrary.simpleMessage('Note'), - 'now_playing_episode_position': MessageLookupByLibrary.simpleMessage('Posizione dell\'episodio'), - 'now_playing_episode_time_remaining': MessageLookupByLibrary.simpleMessage('Tempo rimanente'), - 'now_playing_queue_label': MessageLookupByLibrary.simpleMessage('In Riproduzione'), - 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), - 'open_show_website_label': MessageLookupByLibrary.simpleMessage('Vai al sito web dello show'), - 'opml_export_button_label': MessageLookupByLibrary.simpleMessage('Esporta'), - 'opml_import_button_label': MessageLookupByLibrary.simpleMessage('Importa'), - 'opml_import_export_label': MessageLookupByLibrary.simpleMessage('OPML Importa/Esporta'), - 'pause_button_label': MessageLookupByLibrary.simpleMessage('Sospendi episodio'), - 'play_button_label': MessageLookupByLibrary.simpleMessage('Riproduci episodio'), - 'play_download_button_label': MessageLookupByLibrary.simpleMessage('Riproduci l\'episodio scaricato'), - 'playback_speed_label': MessageLookupByLibrary.simpleMessage('Velocità di riproduzione'), - 'podcast_funding_dialog_header': MessageLookupByLibrary.simpleMessage('Podcast Fondi'), - 'podcast_options_overflow_menu_semantic_label': MessageLookupByLibrary.simpleMessage('Menu opzioni'), - 'queue_add_label': MessageLookupByLibrary.simpleMessage('Aggiungi'), - 'queue_clear_button_label': MessageLookupByLibrary.simpleMessage('Svuota'), - 'queue_clear_label': MessageLookupByLibrary.simpleMessage('Sicuro/a di voler ripulire la 
coda?'), - 'queue_clear_label_title': MessageLookupByLibrary.simpleMessage('Svuota la Coda'), - 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Rimuovi'), - 'refresh_feed_label': MessageLookupByLibrary.simpleMessage('Recupera nuovi episodi'), - 'resume_button_label': MessageLookupByLibrary.simpleMessage('Riprendi episodio'), - 'rewind_button_label': MessageLookupByLibrary.simpleMessage('Riavvolgi di 10 secondi'), - 'scrim_episode_details_selector': MessageLookupByLibrary.simpleMessage('Chiudi i dettagli dell\'episodio'), - 'scrim_episode_filter_selector': MessageLookupByLibrary.simpleMessage('Chiudi il filtro degli episodi'), - 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage('Chiudi ordinamento degli episodi'), - 'scrim_layout_selector': MessageLookupByLibrary.simpleMessage('Chiudi il selettore del layout'), - 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage('Chiudere il selettore del timer di spegnimento'), - 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage('Chiudere il selettore della velocità di riproduzione'), - 'search_back_button_label': MessageLookupByLibrary.simpleMessage('Indietro'), - 'search_button_label': MessageLookupByLibrary.simpleMessage('Cerca'), - 'search_episodes_label': MessageLookupByLibrary.simpleMessage('Cerca episodi'), - 'search_for_podcasts_hint': MessageLookupByLibrary.simpleMessage('Ricerca dei podcasts'), - 'search_provider_label': MessageLookupByLibrary.simpleMessage('Provider di ricerca'), - 'search_transcript_label': MessageLookupByLibrary.simpleMessage('Cerca trascrizione'), - 'semantic_announce_searching': MessageLookupByLibrary.simpleMessage('Ricerca in corso, attender prego.'), - 'semantic_chapter_link_label': MessageLookupByLibrary.simpleMessage('Web link al capitolo'), - 'semantic_current_chapter_label': MessageLookupByLibrary.simpleMessage('Capitolo attuale'), - 'semantic_current_value_label': MessageLookupByLibrary.simpleMessage('Impostazioni correnti'), - 
'semantic_playing_options_collapse_label': MessageLookupByLibrary.simpleMessage('Chiudere il cursore delle opzioni di riproduzione'), - 'semantic_playing_options_expand_label': MessageLookupByLibrary.simpleMessage('Aprire il cursore delle opzioni di riproduzione'), - 'semantic_podcast_artwork_label': MessageLookupByLibrary.simpleMessage('Podcast artwork'), - 'semantics_add_to_queue': MessageLookupByLibrary.simpleMessage('Aggiungi episodio alla coda'), - 'semantics_collapse_podcast_description': MessageLookupByLibrary.simpleMessage('Collassa la descrizione del podcast'), - 'semantics_decrease_playback_speed': MessageLookupByLibrary.simpleMessage('Rallenta la riproduzione'), - 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage('Voce dell\'elenco degli episodi. Visualizza immagine, sommario e i controlli principali.'), - 'semantics_episode_tile_collapsed_hint': MessageLookupByLibrary.simpleMessage('espandi e visualizza più dettagli e opzioni aggiuntive'), - 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage('Voce dell\'elenco degli episodi. 
Visualizza descrizione, controlli principali e controlli aggiuntivi.'), - 'semantics_episode_tile_expanded_hint': MessageLookupByLibrary.simpleMessage('collassa e visualizza il sommario, download e controlli di riproduzione'), - 'semantics_expand_podcast_description': MessageLookupByLibrary.simpleMessage('Espandi la descrizione del podcast'), - 'semantics_increase_playback_speed': MessageLookupByLibrary.simpleMessage('Incrementa la riproduzione'), - 'semantics_layout_option_compact_grid': MessageLookupByLibrary.simpleMessage('Griglia compatta'), - 'semantics_layout_option_grid': MessageLookupByLibrary.simpleMessage('Griglia'), - 'semantics_layout_option_list': MessageLookupByLibrary.simpleMessage('Lista'), - 'semantics_main_player_header': MessageLookupByLibrary.simpleMessage('Finestra principale del player'), - 'semantics_mark_episode_played': MessageLookupByLibrary.simpleMessage('Marca Episodio come riprodotto'), - 'semantics_mark_episode_unplayed': MessageLookupByLibrary.simpleMessage('Marca Episodio come non-riprodotto'), - 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage('Mini player. Swipe a destra per riprodurre/mettere in pausa. 
Attivare per aprire la finestra principale del player'), - 'semantics_play_pause_toggle': MessageLookupByLibrary.simpleMessage('Play/pause toggle'), - 'semantics_podcast_details_header': MessageLookupByLibrary.simpleMessage('Podcast pagina dettagli ed episodi'), - 'semantics_remove_from_queue': MessageLookupByLibrary.simpleMessage('Rimuovi episodio dalla coda'), - 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage('Player a tutto schermo quando l\'episodio inizia'), - 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage('Aggiorna automaticamente gli episodi'), - 'settings_auto_update_episodes_10min': MessageLookupByLibrary.simpleMessage('10 minuti dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_12hour': MessageLookupByLibrary.simpleMessage('12 ore dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_1hour': MessageLookupByLibrary.simpleMessage('1 ora dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_30min': MessageLookupByLibrary.simpleMessage('30 minuti dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_3hour': MessageLookupByLibrary.simpleMessage('3 ore dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_6hour': MessageLookupByLibrary.simpleMessage('6 ore dall\'ultimo aggiornamento'), - 'settings_auto_update_episodes_always': MessageLookupByLibrary.simpleMessage('Sempre'), - 'settings_auto_update_episodes_heading': MessageLookupByLibrary.simpleMessage('Aggiorna gli episodi nella schermata successiva'), - 'settings_auto_update_episodes_never': MessageLookupByLibrary.simpleMessage('Mai'), - 'settings_data_divider_label': MessageLookupByLibrary.simpleMessage('DATI'), - 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage('Elimina gli episodi scaricati una volta riprodotti'), - 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage('Scarica gli episodi nella card SD'), - 'settings_download_switch_card': 
MessageLookupByLibrary.simpleMessage('I nuovi downloads saranno salvati nella card SD. I downloads esistenti rimarranno nello storage interno.'), - 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage('I nuovi downloads saranno salvati nello storage interno. I downloads esistenti rimarranno nella card SD.'), - 'settings_download_switch_label': MessageLookupByLibrary.simpleMessage('Cambia la posizione per lo storage'), - 'settings_episodes_divider_label': MessageLookupByLibrary.simpleMessage('EPISODI'), - 'settings_export_opml': MessageLookupByLibrary.simpleMessage('Esporta OPML'), - 'settings_import_opml': MessageLookupByLibrary.simpleMessage('Importa OPML'), - 'settings_label': MessageLookupByLibrary.simpleMessage('Impostazioni'), - 'settings_mark_deleted_played_label': MessageLookupByLibrary.simpleMessage('Marca gli episodi eliminati come riprodotti'), - 'settings_personalisation_divider_label': MessageLookupByLibrary.simpleMessage('PERSONALIZZAZIONI'), - 'settings_playback_divider_label': MessageLookupByLibrary.simpleMessage('RIPRODUZIONE'), - 'settings_theme_switch_label': MessageLookupByLibrary.simpleMessage('Tema scuro'), - 'show_notes_label': MessageLookupByLibrary.simpleMessage('Visualizza le note'), - 'sleep_episode_label': MessageLookupByLibrary.simpleMessage('Fine dell\'episodio'), - 'sleep_minute_label': m0, - 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), - 'sleep_timer_label': MessageLookupByLibrary.simpleMessage('Timer di Riposo'), - 'stop_download_button_label': MessageLookupByLibrary.simpleMessage('Stop'), - 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage('Sicura/o di voler fermare il download ed eliminare l\'episodio?'), - 'stop_download_title': MessageLookupByLibrary.simpleMessage('Stop Download'), - 'subscribe_button_label': MessageLookupByLibrary.simpleMessage('Segui'), - 'subscribe_label': MessageLookupByLibrary.simpleMessage('Segui'), - 'transcript_label': 
MessageLookupByLibrary.simpleMessage('Trascrizioni'), - 'transcript_why_not_label': MessageLookupByLibrary.simpleMessage('Perché no?'), - 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage('https://anytimeplayer.app/docs/anytime_transcript_support_en.html'), - 'unsubscribe_button_label': MessageLookupByLibrary.simpleMessage('Non Seguire'), - 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Smetti di seguire'), - 'unsubscribe_message': MessageLookupByLibrary.simpleMessage('Smettendo di seguire questo podcast, tutti gli episodi scaricati verranno eliminati.'), - 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Vai al Prossimo') - }; + 'about_label': MessageLookupByLibrary.simpleMessage('Info'), + 'add_rss_feed_option': + MessageLookupByLibrary.simpleMessage('Aggiungi un Feed RSS'), + 'app_title': MessageLookupByLibrary.simpleMessage('Pinepods'), + 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), + 'audio_effect_trim_silence_label': + MessageLookupByLibrary.simpleMessage('Rimuovi Silenzio'), + 'audio_effect_volume_boost_label': + MessageLookupByLibrary.simpleMessage('Incrementa Volume'), + 'audio_settings_playback_speed_label': + MessageLookupByLibrary.simpleMessage('Velocità Riproduzione'), + 'auto_scroll_transcript_label': + MessageLookupByLibrary.simpleMessage('Trascrizione sincronizzata'), + 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Annulla'), + 'cancel_download_button_label': + MessageLookupByLibrary.simpleMessage('Annulla il download'), + 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Annulla'), + 'chapters_label': MessageLookupByLibrary.simpleMessage('Capitoli'), + 'clear_queue_button_label': + MessageLookupByLibrary.simpleMessage('PULISCI CODA'), + 'clear_search_button_label': + MessageLookupByLibrary.simpleMessage('Pulisci il campo di ricerca'), + 'close_button_label': MessageLookupByLibrary.simpleMessage('Chiudi'), + 'consent_message': MessageLookupByLibrary.simpleMessage( 
+ 'Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. I link sono forniti dagli autori del podcast e non sono verificati da Pinepods.'), + 'continue_button_label': + MessageLookupByLibrary.simpleMessage('Continua'), + 'delete_button_label': MessageLookupByLibrary.simpleMessage('Elimina'), + 'delete_episode_button_label': + MessageLookupByLibrary.simpleMessage('Elimina episodio scaricato'), + 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage( + 'Sicura/o di voler eliminare questo episodio?'), + 'delete_episode_title': + MessageLookupByLibrary.simpleMessage('Elimina Episodio'), + 'delete_label': MessageLookupByLibrary.simpleMessage('Elimina'), + 'discover': MessageLookupByLibrary.simpleMessage('Scopri'), + 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage( + ',Arte,Business,Commedia,Educazione,Fiction,Governativi,Salute e Benessere,Storia,Bambini e Famiglia,Tempo Libero,Musica,Notizie,Religione e Spiritualità,Scienza,Società e Cultura,Sport,TV e Film,Tecnologia,True Crime'), + 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage( + ',Dopo-Spettacolo,Alternativi,Animali,Animazione,Arte,Astronomia,Automotive,Aviazione,Baseball,Pallacanestro,Bellezza,Libri,Buddismo,Business,Carriera,Chimica,Cristianità,Clima,Commedia,Commenti,Corsi,Artigianato,Cricket,Cryptocurrency,Cultura,Giornalieri,Design,Documentari,Dramma,Terra,Educazione,Intrattenimento,Imprenditoria,Famiglia,Fantasy,Fashion,Fiction,Film,Fitness,Cibo,Football,Giochi,Giardinaggio,Golf,Governativi,Salute,Induismo,Storia,Hobbies,Hockey,Casa,Come Fare,Improvvisazione,Interviste,Investimenti,Islam,Giornalismo,Giudaismo,Bambini,Lingue,Apprendimento,Tempo-Libero,Stili di 
Vita,Gestione,Manga,Marketing,Matematica,Medicina,Mentale,Musica,Naturale,Natura,Notizie,NonProfit,Nutrizione,Genitorialità,Esecuzione,Personale,Animali-Domestici,Filosofia,Fisica,Posti,Politica,Relazioni,Religione,Recensioni,Giochi-di-Ruolo,Rugby,Corsa,Scienza,Miglioramento-Personale,Sessualità,Calcio,Social,Società,Spiritualità,Sports,Stand-Up,Storie,Nuoto,TV,Tabletop,Tecnologia,Tennis,Viaggi,True Crime,Video-Giochi,Visivo,Pallavolo,Meteo,Natura-Selvaggia,Wrestling'), + 'download_episode_button_label': + MessageLookupByLibrary.simpleMessage('Scarica episodio'), + 'downloads': MessageLookupByLibrary.simpleMessage('Scaricati'), + 'empty_queue_message': + MessageLookupByLibrary.simpleMessage('La tua coda è vuota'), + 'episode_details_button_label': MessageLookupByLibrary.simpleMessage( + 'Mostra le informazioni sull\'episodio'), + 'episode_filter_clear_filters_button_label': + MessageLookupByLibrary.simpleMessage('Pulisci i Filtri'), + 'episode_filter_no_episodes_title_description': + MessageLookupByLibrary.simpleMessage( + 'Questo podcast non ha episodi che corrispondono ai tuoi criteri di ricerca e filtro'), + 'episode_filter_no_episodes_title_label': + MessageLookupByLibrary.simpleMessage('Nessun episodio trovato'), + 'episode_filter_none_label': + MessageLookupByLibrary.simpleMessage('Nessuno'), + 'episode_filter_played_label': + MessageLookupByLibrary.simpleMessage('Riprodotto'), + 'episode_filter_semantic_label': + MessageLookupByLibrary.simpleMessage('Filtra gli episodi'), + 'episode_filter_started_label': + MessageLookupByLibrary.simpleMessage('Avviato'), + 'episode_filter_unplayed_label': + MessageLookupByLibrary.simpleMessage('Non riprodotto'), + 'episode_label': MessageLookupByLibrary.simpleMessage('Episodio'), + 'episode_sort_alphabetical_ascending_label': + MessageLookupByLibrary.simpleMessage('Ordine Alfabetico A-Z'), + 'episode_sort_alphabetical_descending_label': + MessageLookupByLibrary.simpleMessage('Ordine Alfabetico Z-A'), + 
'episode_sort_earliest_first_label': + MessageLookupByLibrary.simpleMessage('I più vecchi'), + 'episode_sort_latest_first_label': + MessageLookupByLibrary.simpleMessage('Gli ultimi'), + 'episode_sort_none_label': + MessageLookupByLibrary.simpleMessage('Default'), + 'episode_sort_semantic_label': + MessageLookupByLibrary.simpleMessage('Ordina gli episodi'), + 'error_no_connection': MessageLookupByLibrary.simpleMessage( + 'Impossibile riprodurre l\'episodio. Per favore, verifica la tua connessione e prova di nuovo.'), + 'error_playback_fail': MessageLookupByLibrary.simpleMessage( + 'Sì è verificato un errore inatteso durante la riproduzione. Per favore, verifica la tua connessione e prova di nuovo.'), + 'fast_forward_button_label': + MessageLookupByLibrary.simpleMessage('Manda avanti di 30 secondi'), + 'feedback_menu_item_label': + MessageLookupByLibrary.simpleMessage('Feedback'), + 'go_back_button_label': + MessageLookupByLibrary.simpleMessage('Torna indietro'), + 'label_opml_importing': + MessageLookupByLibrary.simpleMessage('Importazione in corso'), + 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), + 'library': MessageLookupByLibrary.simpleMessage('Libreria'), + 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage( + 'Marca tutti gli episodi come non riprodotti'), + 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage( + 'Marca tutti gli episodi come riprodotti'), + 'mark_played_label': + MessageLookupByLibrary.simpleMessage('Marca Riprodotto'), + 'mark_unplayed_label': + MessageLookupByLibrary.simpleMessage('Marca da Riprodurre'), + 'minimise_player_window_button_label': + MessageLookupByLibrary.simpleMessage( + 'Minimizza la finestra del player'), + 'more_label': MessageLookupByLibrary.simpleMessage('Di Più'), + 'new_episodes_label': MessageLookupByLibrary.simpleMessage( + 'Nuovi episodi sono disponibili'), + 'new_episodes_view_now_label': + MessageLookupByLibrary.simpleMessage('VEDI ORA'), + 
'no_downloads_message': MessageLookupByLibrary.simpleMessage( + 'Non hai nessun episodio scaricato'), + 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage( + 'Non è possibile caricare gli episodi. Verifica la tua connessione, per favore.'), + 'no_search_results_message': + MessageLookupByLibrary.simpleMessage('Nessun podcast trovato'), + 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage( + 'Tappa il pulsante di ricerca sottostante o usa la barra di ricerca per trovare il tuo primo podcast'), + 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage( + 'Nessuna trascrizione disponibile per questo podcast'), + 'notes_label': MessageLookupByLibrary.simpleMessage('Note'), + 'now_playing_episode_position': + MessageLookupByLibrary.simpleMessage('Posizione dell\'episodio'), + 'now_playing_episode_time_remaining': + MessageLookupByLibrary.simpleMessage('Tempo rimanente'), + 'now_playing_queue_label': + MessageLookupByLibrary.simpleMessage('In Riproduzione'), + 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), + 'open_show_website_label': + MessageLookupByLibrary.simpleMessage('Vai al sito web dello show'), + 'opml_export_button_label': + MessageLookupByLibrary.simpleMessage('Esporta'), + 'opml_import_button_label': + MessageLookupByLibrary.simpleMessage('Importa'), + 'opml_import_export_label': + MessageLookupByLibrary.simpleMessage('OPML Importa/Esporta'), + 'pause_button_label': + MessageLookupByLibrary.simpleMessage('Sospendi episodio'), + 'play_button_label': + MessageLookupByLibrary.simpleMessage('Riproduci episodio'), + 'play_download_button_label': MessageLookupByLibrary.simpleMessage( + 'Riproduci l\'episodio scaricato'), + 'playback_speed_label': + MessageLookupByLibrary.simpleMessage('Velocità di riproduzione'), + 'podcast_funding_dialog_header': + MessageLookupByLibrary.simpleMessage('Podcast Fondi'), + 'podcast_options_overflow_menu_semantic_label': + MessageLookupByLibrary.simpleMessage('Menu 
opzioni'), + 'queue_add_label': MessageLookupByLibrary.simpleMessage('Aggiungi'), + 'queue_clear_button_label': + MessageLookupByLibrary.simpleMessage('Svuota'), + 'queue_clear_label': MessageLookupByLibrary.simpleMessage( + 'Sicuro/a di voler ripulire la coda?'), + 'queue_clear_label_title': + MessageLookupByLibrary.simpleMessage('Svuota la Coda'), + 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Rimuovi'), + 'refresh_feed_label': + MessageLookupByLibrary.simpleMessage('Recupera nuovi episodi'), + 'resume_button_label': + MessageLookupByLibrary.simpleMessage('Riprendi episodio'), + 'rewind_button_label': + MessageLookupByLibrary.simpleMessage('Riavvolgi di 10 secondi'), + 'scrim_episode_details_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudi i dettagli dell\'episodio'), + 'scrim_episode_filter_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudi il filtro degli episodi'), + 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudi ordinamento degli episodi'), + 'scrim_layout_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudi il selettore del layout'), + 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudere il selettore del timer di spegnimento'), + 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage( + 'Chiudere il selettore della velocità di riproduzione'), + 'search_back_button_label': + MessageLookupByLibrary.simpleMessage('Indietro'), + 'search_button_label': MessageLookupByLibrary.simpleMessage('Cerca'), + 'search_episodes_label': + MessageLookupByLibrary.simpleMessage('Cerca episodi'), + 'search_for_podcasts_hint': + MessageLookupByLibrary.simpleMessage('Ricerca dei podcasts'), + 'search_provider_label': + MessageLookupByLibrary.simpleMessage('Provider di ricerca'), + 'search_transcript_label': + MessageLookupByLibrary.simpleMessage('Cerca trascrizione'), + 'semantic_announce_searching': MessageLookupByLibrary.simpleMessage( + 'Ricerca in corso, attender prego.'), + 
'semantic_chapter_link_label': + MessageLookupByLibrary.simpleMessage('Web link al capitolo'), + 'semantic_current_chapter_label': + MessageLookupByLibrary.simpleMessage('Capitolo attuale'), + 'semantic_current_value_label': + MessageLookupByLibrary.simpleMessage('Impostazioni correnti'), + 'semantic_playing_options_collapse_label': + MessageLookupByLibrary.simpleMessage( + 'Chiudere il cursore delle opzioni di riproduzione'), + 'semantic_playing_options_expand_label': + MessageLookupByLibrary.simpleMessage( + 'Aprire il cursore delle opzioni di riproduzione'), + 'semantic_podcast_artwork_label': + MessageLookupByLibrary.simpleMessage('Podcast artwork'), + 'semantics_add_to_queue': + MessageLookupByLibrary.simpleMessage('Aggiungi episodio alla coda'), + 'semantics_collapse_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Collassa la descrizione del podcast'), + 'semantics_decrease_playback_speed': + MessageLookupByLibrary.simpleMessage('Rallenta la riproduzione'), + 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage( + 'Voce dell\'elenco degli episodi. Visualizza immagine, sommario e i controlli principali.'), + 'semantics_episode_tile_collapsed_hint': + MessageLookupByLibrary.simpleMessage( + 'espandi e visualizza più dettagli e opzioni aggiuntive'), + 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage( + 'Voce dell\'elenco degli episodi. 
Visualizza descrizione, controlli principali e controlli aggiuntivi.'), + 'semantics_episode_tile_expanded_hint': + MessageLookupByLibrary.simpleMessage( + 'collassa e visualizza il sommario, download e controlli di riproduzione'), + 'semantics_expand_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Espandi la descrizione del podcast'), + 'semantics_increase_playback_speed': + MessageLookupByLibrary.simpleMessage('Incrementa la riproduzione'), + 'semantics_layout_option_compact_grid': + MessageLookupByLibrary.simpleMessage('Griglia compatta'), + 'semantics_layout_option_grid': + MessageLookupByLibrary.simpleMessage('Griglia'), + 'semantics_layout_option_list': + MessageLookupByLibrary.simpleMessage('Lista'), + 'semantics_main_player_header': MessageLookupByLibrary.simpleMessage( + 'Finestra principale del player'), + 'semantics_mark_episode_played': MessageLookupByLibrary.simpleMessage( + 'Marca Episodio come riprodotto'), + 'semantics_mark_episode_unplayed': MessageLookupByLibrary.simpleMessage( + 'Marca Episodio come non-riprodotto'), + 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage( + 'Mini player. Swipe a destra per riprodurre/mettere in pausa. 
Attivare per aprire la finestra principale del player'), + 'semantics_play_pause_toggle': + MessageLookupByLibrary.simpleMessage('Play/pause toggle'), + 'semantics_podcast_details_header': + MessageLookupByLibrary.simpleMessage( + 'Podcast pagina dettagli ed episodi'), + 'semantics_remove_from_queue': + MessageLookupByLibrary.simpleMessage('Rimuovi episodio dalla coda'), + 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage( + 'Player a tutto schermo quando l\'episodio inizia'), + 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage( + 'Aggiorna automaticamente gli episodi'), + 'settings_auto_update_episodes_10min': + MessageLookupByLibrary.simpleMessage( + '10 minuti dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_12hour': + MessageLookupByLibrary.simpleMessage( + '12 ore dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_1hour': + MessageLookupByLibrary.simpleMessage( + '1 ora dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_30min': + MessageLookupByLibrary.simpleMessage( + '30 minuti dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_3hour': + MessageLookupByLibrary.simpleMessage( + '3 ore dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_6hour': + MessageLookupByLibrary.simpleMessage( + '6 ore dall\'ultimo aggiornamento'), + 'settings_auto_update_episodes_always': + MessageLookupByLibrary.simpleMessage('Sempre'), + 'settings_auto_update_episodes_heading': + MessageLookupByLibrary.simpleMessage( + 'Aggiorna gli episodi nella schermata successiva'), + 'settings_auto_update_episodes_never': + MessageLookupByLibrary.simpleMessage('Mai'), + 'settings_data_divider_label': + MessageLookupByLibrary.simpleMessage('DATI'), + 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage( + 'Elimina gli episodi scaricati una volta riprodotti'), + 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage( + 'Scarica gli episodi nella card SD'), + 
'settings_download_switch_card': MessageLookupByLibrary.simpleMessage( + 'I nuovi downloads saranno salvati nella card SD. I downloads esistenti rimarranno nello storage interno.'), + 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage( + 'I nuovi downloads saranno salvati nello storage interno. I downloads esistenti rimarranno nella card SD.'), + 'settings_download_switch_label': MessageLookupByLibrary.simpleMessage( + 'Cambia la posizione per lo storage'), + 'settings_episodes_divider_label': + MessageLookupByLibrary.simpleMessage('EPISODI'), + 'settings_export_opml': + MessageLookupByLibrary.simpleMessage('Esporta OPML'), + 'settings_import_opml': + MessageLookupByLibrary.simpleMessage('Importa OPML'), + 'settings_label': MessageLookupByLibrary.simpleMessage('Impostazioni'), + 'settings_mark_deleted_played_label': + MessageLookupByLibrary.simpleMessage( + 'Marca gli episodi eliminati come riprodotti'), + 'settings_personalisation_divider_label': + MessageLookupByLibrary.simpleMessage('PERSONALIZZAZIONI'), + 'settings_playback_divider_label': + MessageLookupByLibrary.simpleMessage('RIPRODUZIONE'), + 'settings_theme_switch_label': + MessageLookupByLibrary.simpleMessage('Tema scuro'), + 'show_notes_label': + MessageLookupByLibrary.simpleMessage('Visualizza le note'), + 'sleep_episode_label': + MessageLookupByLibrary.simpleMessage('Fine dell\'episodio'), + 'sleep_minute_label': m0, + 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), + 'sleep_timer_label': + MessageLookupByLibrary.simpleMessage('Timer di Riposo'), + 'stop_download_button_label': + MessageLookupByLibrary.simpleMessage('Stop'), + 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage( + 'Sicura/o di voler fermare il download ed eliminare l\'episodio?'), + 'stop_download_title': + MessageLookupByLibrary.simpleMessage('Stop Download'), + 'subscribe_button_label': MessageLookupByLibrary.simpleMessage('Segui'), + 'subscribe_label': 
MessageLookupByLibrary.simpleMessage('Segui'), + 'transcript_label': + MessageLookupByLibrary.simpleMessage('Trascrizioni'), + 'transcript_why_not_label': + MessageLookupByLibrary.simpleMessage('Perché no?'), + 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage( + 'https://www.pinepods.online/docs/Features/Transcript'), + 'unsubscribe_button_label': + MessageLookupByLibrary.simpleMessage('Non Seguire'), + 'unsubscribe_label': + MessageLookupByLibrary.simpleMessage('Smetti di seguire'), + 'unsubscribe_message': MessageLookupByLibrary.simpleMessage( + 'Smettendo di seguire questo podcast, tutti gli episodi scaricati verranno eliminati.'), + 'up_next_queue_label': + MessageLookupByLibrary.simpleMessage('Vai al Prossimo') + }; } diff --git a/mobile/lib/l10n/messages_messages.dart b/mobile/lib/l10n/messages_messages.dart index 492fa37f..66a4fed1 100644 --- a/mobile/lib/l10n/messages_messages.dart +++ b/mobile/lib/l10n/messages_messages.dart @@ -14,8 +14,7 @@ import 'package:intl/message_lookup_by_library.dart'; final messages = MessageLookup(); -typedef String? MessageIfAbsent( - String? messageStr, List? args); +typedef String? MessageIfAbsent(String? messageStr, List? 
args); class MessageLookup extends MessageLookupByLibrary { @override @@ -24,176 +23,327 @@ class MessageLookup extends MessageLookupByLibrary { static m0(minutes) => "${minutes} minutes"; @override - final Map messages = _notInlinedMessages(_notInlinedMessages); + final Map messages = + _notInlinedMessages(_notInlinedMessages); static Map _notInlinedMessages(_) => { - 'about_label': MessageLookupByLibrary.simpleMessage('About'), - 'add_rss_feed_option': MessageLookupByLibrary.simpleMessage('Add RSS Feed'), - 'app_title': MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), - 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), - 'audio_effect_trim_silence_label': MessageLookupByLibrary.simpleMessage('Trim Silence'), - 'audio_effect_volume_boost_label': MessageLookupByLibrary.simpleMessage('Volume Boost'), - 'audio_settings_playback_speed_label': MessageLookupByLibrary.simpleMessage('Playback Speed'), - 'auto_scroll_transcript_label': MessageLookupByLibrary.simpleMessage('Follow transcript'), - 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Cancel'), - 'cancel_download_button_label': MessageLookupByLibrary.simpleMessage('Cancel download'), - 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Cancel'), - 'chapters_label': MessageLookupByLibrary.simpleMessage('Chapters'), - 'clear_queue_button_label': MessageLookupByLibrary.simpleMessage('CLEAR QUEUE'), - 'clear_search_button_label': MessageLookupByLibrary.simpleMessage('Clear search text'), - 'close_button_label': MessageLookupByLibrary.simpleMessage('Close'), - 'consent_message': MessageLookupByLibrary.simpleMessage('This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by Pinepods.'), - 'continue_button_label': MessageLookupByLibrary.simpleMessage('Continue'), - 'delete_button_label': MessageLookupByLibrary.simpleMessage('Delete'), - 'delete_episode_button_label': MessageLookupByLibrary.simpleMessage('Delete downloaded episode'), - 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage('Are you sure you wish to delete this episode?'), - 'delete_episode_title': MessageLookupByLibrary.simpleMessage('Delete Episode'), - 'delete_label': MessageLookupByLibrary.simpleMessage('Delete'), - 'discover': MessageLookupByLibrary.simpleMessage('Discover'), - 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage(',Arts,Business,Comedy,Education,Fiction,Government,Health & Fitness,History,Kids & Family,Leisure,Music,News,Religion & Spirituality,Science,Society & Culture,Sports,TV & Film,Technology,True Crime'), - 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage(',After-Shows,Alternative,Animals,Animation,Arts,Astronomy,Automotive,Aviation,Baseball,Basketball,Beauty,Books,Buddhism,Business,Careers,Chemistry,Christianity,Climate,Comedy,Commentary,Courses,Crafts,Cricket,Cryptocurrency,Culture,Daily,Design,Documentary,Drama,Earth,Education,Entertainment,Entrepreneurship,Family,Fantasy,Fashion,Fiction,Film,Fitness,Food,Football,Games,Garden,Golf,Government,Health,Hinduism,History,Hobbies,Hockey,Home,HowTo,Improv,Interviews,Investing,Islam,Journals,Judaism,Kids,Language,Learning,Leisure,Life,Management,Manga,Marketing,Mathematics,Medicine,Mental,Music,Natural,Nature,News,NonProfit,Nutrition,Parenting,Performing,Personal,Pets,Philosophy,Physics,Places,Politics,Relationships,Religion,Reviews,Role-Playing,Rugby,Running,Science,Self-Improvement,Sexuality,Soccer,Social,Society,Spirituality,Sports,Stand-Up,Stories,Swimming,TV,Tabletop,Technology,Tennis,Travel,True Crime,Video-Games,Visual,Volleyball,Weather,Wilderness,Wrestling'), - 
'download_episode_button_label': MessageLookupByLibrary.simpleMessage('Download episode'), - 'downloads': MessageLookupByLibrary.simpleMessage('Downloads'), - 'empty_queue_message': MessageLookupByLibrary.simpleMessage('Your queue is empty'), - 'episode_details_button_label': MessageLookupByLibrary.simpleMessage('Show episode information'), - 'episode_filter_clear_filters_button_label': MessageLookupByLibrary.simpleMessage('Clear Filters'), - 'episode_filter_no_episodes_title_description': MessageLookupByLibrary.simpleMessage('No Episodes Found'), - 'episode_filter_no_episodes_title_label': MessageLookupByLibrary.simpleMessage('No Episodes Found'), - 'episode_filter_none_label': MessageLookupByLibrary.simpleMessage('None'), - 'episode_filter_played_label': MessageLookupByLibrary.simpleMessage('Played'), - 'episode_filter_semantic_label': MessageLookupByLibrary.simpleMessage('Episode filter'), - 'episode_filter_started_label': MessageLookupByLibrary.simpleMessage('Started'), - 'episode_filter_unplayed_label': MessageLookupByLibrary.simpleMessage('Unplayed'), - 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), - 'episode_sort_alphabetical_ascending_label': MessageLookupByLibrary.simpleMessage('Alphabetical A-Z'), - 'episode_sort_alphabetical_descending_label': MessageLookupByLibrary.simpleMessage('Alphabetical Z-A'), - 'episode_sort_earliest_first_label': MessageLookupByLibrary.simpleMessage('Earliest first'), - 'episode_sort_latest_first_label': MessageLookupByLibrary.simpleMessage('Latest first'), - 'episode_sort_none_label': MessageLookupByLibrary.simpleMessage('Default'), - 'episode_sort_semantic_label': MessageLookupByLibrary.simpleMessage('Episode sort'), - 'error_no_connection': MessageLookupByLibrary.simpleMessage('Unable to play episode. Please check your connection and try again.'), - 'error_playback_fail': MessageLookupByLibrary.simpleMessage('An unexpected error occurred during playback. 
Please check your connection and try again.'), - 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage('Fast-forward episode 30 seconds'), - 'feedback_menu_item_label': MessageLookupByLibrary.simpleMessage('Feedback'), - 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Go Back'), - 'label_opml_importing': MessageLookupByLibrary.simpleMessage('Importing'), - 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), - 'library': MessageLookupByLibrary.simpleMessage('Library'), - 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage('Mark all episodes as not played'), - 'mark_episodes_played_label': MessageLookupByLibrary.simpleMessage('Mark all episodes as played'), - 'mark_played_label': MessageLookupByLibrary.simpleMessage('Mark Played'), - 'mark_unplayed_label': MessageLookupByLibrary.simpleMessage('Mark Unplayed'), - 'minimise_player_window_button_label': MessageLookupByLibrary.simpleMessage('Minimise player window'), - 'more_label': MessageLookupByLibrary.simpleMessage('More'), - 'new_episodes_label': MessageLookupByLibrary.simpleMessage('New episodes are available'), - 'new_episodes_view_now_label': MessageLookupByLibrary.simpleMessage('VIEW NOW'), - 'no_downloads_message': MessageLookupByLibrary.simpleMessage('You do not have any downloaded episodes'), - 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage('Could not load podcast episodes. 
Please check your connection.'), - 'no_search_results_message': MessageLookupByLibrary.simpleMessage('No podcasts found'), - 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage('Head to Settings to Connect a Pinepods Server if you haven\'t yet!'), - 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage('A transcript is not available for this podcast'), - 'notes_label': MessageLookupByLibrary.simpleMessage('Description'), - 'now_playing_episode_position': MessageLookupByLibrary.simpleMessage('Episode position'), - 'now_playing_episode_time_remaining': MessageLookupByLibrary.simpleMessage('Time remaining'), - 'now_playing_queue_label': MessageLookupByLibrary.simpleMessage('Now Playing'), - 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), - 'open_show_website_label': MessageLookupByLibrary.simpleMessage('Open show website'), - 'opml_export_button_label': MessageLookupByLibrary.simpleMessage('Export'), - 'opml_import_button_label': MessageLookupByLibrary.simpleMessage('Import'), - 'opml_import_export_label': MessageLookupByLibrary.simpleMessage('OPML Import/Export'), - 'pause_button_label': MessageLookupByLibrary.simpleMessage('Pause episode'), - 'play_button_label': MessageLookupByLibrary.simpleMessage('Play episode'), - 'play_download_button_label': MessageLookupByLibrary.simpleMessage('Play downloaded episode'), - 'playback_speed_label': MessageLookupByLibrary.simpleMessage('Playback speed'), - 'podcast_funding_dialog_header': MessageLookupByLibrary.simpleMessage('Podcast Funding'), - 'podcast_options_overflow_menu_semantic_label': MessageLookupByLibrary.simpleMessage('Options menu'), - 'queue_add_label': MessageLookupByLibrary.simpleMessage('Add'), - 'queue_clear_button_label': MessageLookupByLibrary.simpleMessage('Clear'), - 'queue_clear_label': MessageLookupByLibrary.simpleMessage('Are you sure you wish to clear the queue?'), - 'queue_clear_label_title': MessageLookupByLibrary.simpleMessage('Clear Queue'), - 
'queue_remove_label': MessageLookupByLibrary.simpleMessage('Remove'), - 'refresh_feed_label': MessageLookupByLibrary.simpleMessage('Refresh episodes'), - 'resume_button_label': MessageLookupByLibrary.simpleMessage('Resume episode'), - 'rewind_button_label': MessageLookupByLibrary.simpleMessage('Rewind episode 10 seconds'), - 'scrim_episode_details_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode details'), - 'scrim_episode_filter_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode filter'), - 'scrim_episode_sort_selector': MessageLookupByLibrary.simpleMessage('Dismiss episode sort'), - 'scrim_layout_selector': MessageLookupByLibrary.simpleMessage('Dismiss layout selector'), - 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage('Dismiss sleep timer selector'), - 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage('Dismiss playback speed selector'), - 'search_back_button_label': MessageLookupByLibrary.simpleMessage('Back'), - 'search_button_label': MessageLookupByLibrary.simpleMessage('Search'), - 'search_episodes_label': MessageLookupByLibrary.simpleMessage('Search episodes'), - 'search_for_podcasts_hint': MessageLookupByLibrary.simpleMessage('Search for podcasts'), - 'search_provider_label': MessageLookupByLibrary.simpleMessage('Search provider'), - 'search_transcript_label': MessageLookupByLibrary.simpleMessage('Search transcript'), - 'semantic_announce_searching': MessageLookupByLibrary.simpleMessage('Searching, please wait.'), - 'semantic_chapter_link_label': MessageLookupByLibrary.simpleMessage('Chapter web link'), - 'semantic_current_chapter_label': MessageLookupByLibrary.simpleMessage('Current chapter'), - 'semantic_current_value_label': MessageLookupByLibrary.simpleMessage('Current value'), - 'semantic_playing_options_collapse_label': MessageLookupByLibrary.simpleMessage('Close playing options slider'), - 'semantic_playing_options_expand_label': MessageLookupByLibrary.simpleMessage('Open playing options 
slider'), - 'semantic_podcast_artwork_label': MessageLookupByLibrary.simpleMessage('Podcast artwork'), - 'semantics_add_to_queue': MessageLookupByLibrary.simpleMessage('Add episode to queue'), - 'semantics_collapse_podcast_description': MessageLookupByLibrary.simpleMessage('Collapse podcast description'), - 'semantics_decrease_playback_speed': MessageLookupByLibrary.simpleMessage('Decrease playback speed'), - 'semantics_episode_tile_collapsed': MessageLookupByLibrary.simpleMessage('Episode list item. Showing image, summary and main controls.'), - 'semantics_episode_tile_collapsed_hint': MessageLookupByLibrary.simpleMessage('expand and show more details and additional options'), - 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage('Episode list item. Showing description, main controls and additional controls.'), - 'semantics_episode_tile_expanded_hint': MessageLookupByLibrary.simpleMessage('collapse and show summary, download and play control'), - 'semantics_expand_podcast_description': MessageLookupByLibrary.simpleMessage('Expand podcast description'), - 'semantics_increase_playback_speed': MessageLookupByLibrary.simpleMessage('Increase playback speed'), - 'semantics_layout_option_compact_grid': MessageLookupByLibrary.simpleMessage('Compact grid layout'), - 'semantics_layout_option_grid': MessageLookupByLibrary.simpleMessage('Grid layout'), - 'semantics_layout_option_list': MessageLookupByLibrary.simpleMessage('List layout'), - 'semantics_main_player_header': MessageLookupByLibrary.simpleMessage('Main player window'), - 'semantics_mark_episode_played': MessageLookupByLibrary.simpleMessage('Mark Episode as played'), - 'semantics_mark_episode_unplayed': MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), - 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage('Mini player. Swipe right to play/pause button. 
Activate to open main player window'), - 'semantics_play_pause_toggle': MessageLookupByLibrary.simpleMessage('Play/pause toggle'), - 'semantics_podcast_details_header': MessageLookupByLibrary.simpleMessage('Podcast details and episodes page'), - 'semantics_remove_from_queue': MessageLookupByLibrary.simpleMessage('Remove episode from queue'), - 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage('Full screen player mode on episode start'), - 'settings_auto_update_episodes': MessageLookupByLibrary.simpleMessage('Auto update episodes'), - 'settings_auto_update_episodes_10min': MessageLookupByLibrary.simpleMessage('10 minutes since last update'), - 'settings_auto_update_episodes_12hour': MessageLookupByLibrary.simpleMessage('12 hours since last update'), - 'settings_auto_update_episodes_1hour': MessageLookupByLibrary.simpleMessage('1 hour since last update'), - 'settings_auto_update_episodes_30min': MessageLookupByLibrary.simpleMessage('30 minutes since last update'), - 'settings_auto_update_episodes_3hour': MessageLookupByLibrary.simpleMessage('3 hours since last update'), - 'settings_auto_update_episodes_6hour': MessageLookupByLibrary.simpleMessage('6 hours since last update'), - 'settings_auto_update_episodes_always': MessageLookupByLibrary.simpleMessage('Always'), - 'settings_auto_update_episodes_heading': MessageLookupByLibrary.simpleMessage('Refresh episodes on details screen after'), - 'settings_auto_update_episodes_never': MessageLookupByLibrary.simpleMessage('Never'), - 'settings_data_divider_label': MessageLookupByLibrary.simpleMessage('DATA'), - 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage('Delete downloaded episodes once played'), - 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage('Download episodes to SD card'), - 'settings_download_switch_card': MessageLookupByLibrary.simpleMessage('New downloads will be saved to the SD card. 
Existing downloads will remain on internal storage.'), - 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage('New downloads will be saved to internal storage. Existing downloads will remain on the SD card.'), - 'settings_download_switch_label': MessageLookupByLibrary.simpleMessage('Change storage location'), - 'settings_episodes_divider_label': MessageLookupByLibrary.simpleMessage('EPISODES'), - 'settings_export_opml': MessageLookupByLibrary.simpleMessage('Export OPML'), - 'settings_import_opml': MessageLookupByLibrary.simpleMessage('Import OPML'), - 'settings_label': MessageLookupByLibrary.simpleMessage('Settings'), - 'settings_mark_deleted_played_label': MessageLookupByLibrary.simpleMessage('Mark deleted episodes as played'), - 'settings_personalisation_divider_label': MessageLookupByLibrary.simpleMessage('PERSONALISATION'), - 'settings_playback_divider_label': MessageLookupByLibrary.simpleMessage('PLAYBACK'), - 'settings_theme_switch_label': MessageLookupByLibrary.simpleMessage('Dark theme'), - 'show_notes_label': MessageLookupByLibrary.simpleMessage('Show notes'), - 'sleep_episode_label': MessageLookupByLibrary.simpleMessage('End of episode'), - 'sleep_minute_label': m0, - 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), - 'sleep_timer_label': MessageLookupByLibrary.simpleMessage('Sleep Timer'), - 'stop_download_button_label': MessageLookupByLibrary.simpleMessage('Stop'), - 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage('Are you sure you wish to stop this download and delete the episode?'), - 'stop_download_title': MessageLookupByLibrary.simpleMessage('Stop Download'), - 'subscribe_button_label': MessageLookupByLibrary.simpleMessage('Follow'), - 'subscribe_label': MessageLookupByLibrary.simpleMessage('Follow'), - 'transcript_label': MessageLookupByLibrary.simpleMessage('Transcript'), - 'transcript_why_not_label': MessageLookupByLibrary.simpleMessage('Why not?'), - 'transcript_why_not_url': 
MessageLookupByLibrary.simpleMessage('https://anytimeplayer.app/docs/anytime_transcript_support_en.html'), - 'unsubscribe_button_label': MessageLookupByLibrary.simpleMessage('Unfollow'), - 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Unfollow'), - 'unsubscribe_message': MessageLookupByLibrary.simpleMessage('Unfollowing will delete all downloaded episodes of this podcast.'), - 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Up Next') - }; + 'about_label': MessageLookupByLibrary.simpleMessage('About'), + 'add_rss_feed_option': + MessageLookupByLibrary.simpleMessage('Add RSS Feed'), + 'app_title': + MessageLookupByLibrary.simpleMessage('Pinepods Podcast Client'), + 'app_title_short': MessageLookupByLibrary.simpleMessage('Pinepods'), + 'audio_effect_trim_silence_label': + MessageLookupByLibrary.simpleMessage('Trim Silence'), + 'audio_effect_volume_boost_label': + MessageLookupByLibrary.simpleMessage('Volume Boost'), + 'audio_settings_playback_speed_label': + MessageLookupByLibrary.simpleMessage('Playback Speed'), + 'auto_scroll_transcript_label': + MessageLookupByLibrary.simpleMessage('Follow transcript'), + 'cancel_button_label': MessageLookupByLibrary.simpleMessage('Cancel'), + 'cancel_download_button_label': + MessageLookupByLibrary.simpleMessage('Cancel download'), + 'cancel_option_label': MessageLookupByLibrary.simpleMessage('Cancel'), + 'chapters_label': MessageLookupByLibrary.simpleMessage('Chapters'), + 'clear_queue_button_label': + MessageLookupByLibrary.simpleMessage('CLEAR QUEUE'), + 'clear_search_button_label': + MessageLookupByLibrary.simpleMessage('Clear search text'), + 'close_button_label': MessageLookupByLibrary.simpleMessage('Close'), + 'consent_message': MessageLookupByLibrary.simpleMessage( + 'This funding link will take you to an external site where you will be able to directly support the show. 
Links are provided by the podcast authors and is not controlled by Pinepods.'), + 'continue_button_label': + MessageLookupByLibrary.simpleMessage('Continue'), + 'delete_button_label': MessageLookupByLibrary.simpleMessage('Delete'), + 'delete_episode_button_label': + MessageLookupByLibrary.simpleMessage('Delete downloaded episode'), + 'delete_episode_confirmation': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to delete this episode?'), + 'delete_episode_title': + MessageLookupByLibrary.simpleMessage('Delete Episode'), + 'delete_label': MessageLookupByLibrary.simpleMessage('Delete'), + 'discover': MessageLookupByLibrary.simpleMessage('Discover'), + 'discovery_categories_itunes': MessageLookupByLibrary.simpleMessage( + ',Arts,Business,Comedy,Education,Fiction,Government,Health & Fitness,History,Kids & Family,Leisure,Music,News,Religion & Spirituality,Science,Society & Culture,Sports,TV & Film,Technology,True Crime'), + 'discovery_categories_pindex': MessageLookupByLibrary.simpleMessage( + ',After-Shows,Alternative,Animals,Animation,Arts,Astronomy,Automotive,Aviation,Baseball,Basketball,Beauty,Books,Buddhism,Business,Careers,Chemistry,Christianity,Climate,Comedy,Commentary,Courses,Crafts,Cricket,Cryptocurrency,Culture,Daily,Design,Documentary,Drama,Earth,Education,Entertainment,Entrepreneurship,Family,Fantasy,Fashion,Fiction,Film,Fitness,Food,Football,Games,Garden,Golf,Government,Health,Hinduism,History,Hobbies,Hockey,Home,HowTo,Improv,Interviews,Investing,Islam,Journals,Judaism,Kids,Language,Learning,Leisure,Life,Management,Manga,Marketing,Mathematics,Medicine,Mental,Music,Natural,Nature,News,NonProfit,Nutrition,Parenting,Performing,Personal,Pets,Philosophy,Physics,Places,Politics,Relationships,Religion,Reviews,Role-Playing,Rugby,Running,Science,Self-Improvement,Sexuality,Soccer,Social,Society,Spirituality,Sports,Stand-Up,Stories,Swimming,TV,Tabletop,Technology,Tennis,Travel,True Crime,Video-Games,Visual,Volleyball,Weather,Wilderness,Wrestling'), + 
'download_episode_button_label': + MessageLookupByLibrary.simpleMessage('Download episode'), + 'downloads': MessageLookupByLibrary.simpleMessage('Downloads'), + 'empty_queue_message': + MessageLookupByLibrary.simpleMessage('Your queue is empty'), + 'episode_details_button_label': + MessageLookupByLibrary.simpleMessage('Show episode information'), + 'episode_filter_clear_filters_button_label': + MessageLookupByLibrary.simpleMessage('Clear Filters'), + 'episode_filter_no_episodes_title_description': + MessageLookupByLibrary.simpleMessage('No Episodes Found'), + 'episode_filter_no_episodes_title_label': + MessageLookupByLibrary.simpleMessage('No Episodes Found'), + 'episode_filter_none_label': + MessageLookupByLibrary.simpleMessage('None'), + 'episode_filter_played_label': + MessageLookupByLibrary.simpleMessage('Played'), + 'episode_filter_semantic_label': + MessageLookupByLibrary.simpleMessage('Episode filter'), + 'episode_filter_started_label': + MessageLookupByLibrary.simpleMessage('Started'), + 'episode_filter_unplayed_label': + MessageLookupByLibrary.simpleMessage('Unplayed'), + 'episode_label': MessageLookupByLibrary.simpleMessage('Episode'), + 'episode_sort_alphabetical_ascending_label': + MessageLookupByLibrary.simpleMessage('Alphabetical A-Z'), + 'episode_sort_alphabetical_descending_label': + MessageLookupByLibrary.simpleMessage('Alphabetical Z-A'), + 'episode_sort_earliest_first_label': + MessageLookupByLibrary.simpleMessage('Earliest first'), + 'episode_sort_latest_first_label': + MessageLookupByLibrary.simpleMessage('Latest first'), + 'episode_sort_none_label': + MessageLookupByLibrary.simpleMessage('Default'), + 'episode_sort_semantic_label': + MessageLookupByLibrary.simpleMessage('Episode sort'), + 'error_no_connection': MessageLookupByLibrary.simpleMessage( + 'Unable to play episode. Please check your connection and try again.'), + 'error_playback_fail': MessageLookupByLibrary.simpleMessage( + 'An unexpected error occurred during playback. 
Please check your connection and try again.'), + 'fast_forward_button_label': MessageLookupByLibrary.simpleMessage( + 'Fast-forward episode 30 seconds'), + 'feedback_menu_item_label': + MessageLookupByLibrary.simpleMessage('Feedback'), + 'go_back_button_label': MessageLookupByLibrary.simpleMessage('Go Back'), + 'label_opml_importing': + MessageLookupByLibrary.simpleMessage('Importing'), + 'layout_label': MessageLookupByLibrary.simpleMessage('Layout'), + 'library': MessageLookupByLibrary.simpleMessage('Library'), + 'mark_episodes_not_played_label': MessageLookupByLibrary.simpleMessage( + 'Mark all episodes as not played'), + 'mark_episodes_played_label': + MessageLookupByLibrary.simpleMessage('Mark all episodes as played'), + 'mark_played_label': + MessageLookupByLibrary.simpleMessage('Mark Played'), + 'mark_unplayed_label': + MessageLookupByLibrary.simpleMessage('Mark Unplayed'), + 'minimise_player_window_button_label': + MessageLookupByLibrary.simpleMessage('Minimise player window'), + 'more_label': MessageLookupByLibrary.simpleMessage('More'), + 'new_episodes_label': + MessageLookupByLibrary.simpleMessage('New episodes are available'), + 'new_episodes_view_now_label': + MessageLookupByLibrary.simpleMessage('VIEW NOW'), + 'no_downloads_message': MessageLookupByLibrary.simpleMessage( + 'You do not have any downloaded episodes'), + 'no_podcast_details_message': MessageLookupByLibrary.simpleMessage( + 'Could not load podcast episodes. 
Please check your connection.'), + 'no_search_results_message': + MessageLookupByLibrary.simpleMessage('No podcasts found'), + 'no_subscriptions_message': MessageLookupByLibrary.simpleMessage( + 'Head to Settings to Connect a Pinepods Server if you haven\'t yet!'), + 'no_transcript_available_label': MessageLookupByLibrary.simpleMessage( + 'A transcript is not available for this podcast'), + 'notes_label': MessageLookupByLibrary.simpleMessage('Description'), + 'now_playing_episode_position': + MessageLookupByLibrary.simpleMessage('Episode position'), + 'now_playing_episode_time_remaining': + MessageLookupByLibrary.simpleMessage('Time remaining'), + 'now_playing_queue_label': + MessageLookupByLibrary.simpleMessage('Now Playing'), + 'ok_button_label': MessageLookupByLibrary.simpleMessage('OK'), + 'open_show_website_label': + MessageLookupByLibrary.simpleMessage('Open show website'), + 'opml_export_button_label': + MessageLookupByLibrary.simpleMessage('Export'), + 'opml_import_button_label': + MessageLookupByLibrary.simpleMessage('Import'), + 'opml_import_export_label': + MessageLookupByLibrary.simpleMessage('OPML Import/Export'), + 'pause_button_label': + MessageLookupByLibrary.simpleMessage('Pause episode'), + 'play_button_label': + MessageLookupByLibrary.simpleMessage('Play episode'), + 'play_download_button_label': + MessageLookupByLibrary.simpleMessage('Play downloaded episode'), + 'playback_speed_label': + MessageLookupByLibrary.simpleMessage('Playback speed'), + 'podcast_funding_dialog_header': + MessageLookupByLibrary.simpleMessage('Podcast Funding'), + 'podcast_options_overflow_menu_semantic_label': + MessageLookupByLibrary.simpleMessage('Options menu'), + 'queue_add_label': MessageLookupByLibrary.simpleMessage('Add'), + 'queue_clear_button_label': + MessageLookupByLibrary.simpleMessage('Clear'), + 'queue_clear_label': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to clear the queue?'), + 'queue_clear_label_title': + 
MessageLookupByLibrary.simpleMessage('Clear Queue'), + 'queue_remove_label': MessageLookupByLibrary.simpleMessage('Remove'), + 'refresh_feed_label': + MessageLookupByLibrary.simpleMessage('Refresh episodes'), + 'resume_button_label': + MessageLookupByLibrary.simpleMessage('Resume episode'), + 'rewind_button_label': + MessageLookupByLibrary.simpleMessage('Rewind episode 10 seconds'), + 'scrim_episode_details_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode details'), + 'scrim_episode_filter_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode filter'), + 'scrim_episode_sort_selector': + MessageLookupByLibrary.simpleMessage('Dismiss episode sort'), + 'scrim_layout_selector': + MessageLookupByLibrary.simpleMessage('Dismiss layout selector'), + 'scrim_sleep_timer_selector': MessageLookupByLibrary.simpleMessage( + 'Dismiss sleep timer selector'), + 'scrim_speed_selector': MessageLookupByLibrary.simpleMessage( + 'Dismiss playback speed selector'), + 'search_back_button_label': + MessageLookupByLibrary.simpleMessage('Back'), + 'search_button_label': MessageLookupByLibrary.simpleMessage('Search'), + 'search_episodes_label': + MessageLookupByLibrary.simpleMessage('Search episodes'), + 'search_for_podcasts_hint': + MessageLookupByLibrary.simpleMessage('Search for podcasts'), + 'search_provider_label': + MessageLookupByLibrary.simpleMessage('Search provider'), + 'search_transcript_label': + MessageLookupByLibrary.simpleMessage('Search transcript'), + 'semantic_announce_searching': + MessageLookupByLibrary.simpleMessage('Searching, please wait.'), + 'semantic_chapter_link_label': + MessageLookupByLibrary.simpleMessage('Chapter web link'), + 'semantic_current_chapter_label': + MessageLookupByLibrary.simpleMessage('Current chapter'), + 'semantic_current_value_label': + MessageLookupByLibrary.simpleMessage('Current value'), + 'semantic_playing_options_collapse_label': + MessageLookupByLibrary.simpleMessage( + 'Close playing options slider'), + 
'semantic_playing_options_expand_label': + MessageLookupByLibrary.simpleMessage('Open playing options slider'), + 'semantic_podcast_artwork_label': + MessageLookupByLibrary.simpleMessage('Podcast artwork'), + 'semantics_add_to_queue': + MessageLookupByLibrary.simpleMessage('Add episode to queue'), + 'semantics_collapse_podcast_description': + MessageLookupByLibrary.simpleMessage( + 'Collapse podcast description'), + 'semantics_decrease_playback_speed': + MessageLookupByLibrary.simpleMessage('Decrease playback speed'), + 'semantics_episode_tile_collapsed': + MessageLookupByLibrary.simpleMessage( + 'Episode list item. Showing image, summary and main controls.'), + 'semantics_episode_tile_collapsed_hint': + MessageLookupByLibrary.simpleMessage( + 'expand and show more details and additional options'), + 'semantics_episode_tile_expanded': MessageLookupByLibrary.simpleMessage( + 'Episode list item. Showing description, main controls and additional controls.'), + 'semantics_episode_tile_expanded_hint': + MessageLookupByLibrary.simpleMessage( + 'collapse and show summary, download and play control'), + 'semantics_expand_podcast_description': + MessageLookupByLibrary.simpleMessage('Expand podcast description'), + 'semantics_increase_playback_speed': + MessageLookupByLibrary.simpleMessage('Increase playback speed'), + 'semantics_layout_option_compact_grid': + MessageLookupByLibrary.simpleMessage('Compact grid layout'), + 'semantics_layout_option_grid': + MessageLookupByLibrary.simpleMessage('Grid layout'), + 'semantics_layout_option_list': + MessageLookupByLibrary.simpleMessage('List layout'), + 'semantics_main_player_header': + MessageLookupByLibrary.simpleMessage('Main player window'), + 'semantics_mark_episode_played': + MessageLookupByLibrary.simpleMessage('Mark Episode as played'), + 'semantics_mark_episode_unplayed': + MessageLookupByLibrary.simpleMessage('Mark Episode as un-played'), + 'semantics_mini_player_header': MessageLookupByLibrary.simpleMessage( + 'Mini 
player. Swipe right to play/pause button. Activate to open main player window'), + 'semantics_play_pause_toggle': + MessageLookupByLibrary.simpleMessage('Play/pause toggle'), + 'semantics_podcast_details_header': + MessageLookupByLibrary.simpleMessage( + 'Podcast details and episodes page'), + 'semantics_remove_from_queue': + MessageLookupByLibrary.simpleMessage('Remove episode from queue'), + 'settings_auto_open_now_playing': MessageLookupByLibrary.simpleMessage( + 'Full screen player mode on episode start'), + 'settings_auto_update_episodes': + MessageLookupByLibrary.simpleMessage('Auto update episodes'), + 'settings_auto_update_episodes_10min': + MessageLookupByLibrary.simpleMessage( + '10 minutes since last update'), + 'settings_auto_update_episodes_12hour': + MessageLookupByLibrary.simpleMessage('12 hours since last update'), + 'settings_auto_update_episodes_1hour': + MessageLookupByLibrary.simpleMessage('1 hour since last update'), + 'settings_auto_update_episodes_30min': + MessageLookupByLibrary.simpleMessage( + '30 minutes since last update'), + 'settings_auto_update_episodes_3hour': + MessageLookupByLibrary.simpleMessage('3 hours since last update'), + 'settings_auto_update_episodes_6hour': + MessageLookupByLibrary.simpleMessage('6 hours since last update'), + 'settings_auto_update_episodes_always': + MessageLookupByLibrary.simpleMessage('Always'), + 'settings_auto_update_episodes_heading': + MessageLookupByLibrary.simpleMessage( + 'Refresh episodes on details screen after'), + 'settings_auto_update_episodes_never': + MessageLookupByLibrary.simpleMessage('Never'), + 'settings_data_divider_label': + MessageLookupByLibrary.simpleMessage('DATA'), + 'settings_delete_played_label': MessageLookupByLibrary.simpleMessage( + 'Delete downloaded episodes once played'), + 'settings_download_sd_card_label': MessageLookupByLibrary.simpleMessage( + 'Download episodes to SD card'), + 'settings_download_switch_card': MessageLookupByLibrary.simpleMessage( + 'New downloads 
will be saved to the SD card. Existing downloads will remain on internal storage.'), + 'settings_download_switch_internal': MessageLookupByLibrary.simpleMessage( + 'New downloads will be saved to internal storage. Existing downloads will remain on the SD card.'), + 'settings_download_switch_label': + MessageLookupByLibrary.simpleMessage('Change storage location'), + 'settings_episodes_divider_label': + MessageLookupByLibrary.simpleMessage('EPISODES'), + 'settings_export_opml': + MessageLookupByLibrary.simpleMessage('Export OPML'), + 'settings_import_opml': + MessageLookupByLibrary.simpleMessage('Import OPML'), + 'settings_label': MessageLookupByLibrary.simpleMessage('Settings'), + 'settings_mark_deleted_played_label': + MessageLookupByLibrary.simpleMessage( + 'Mark deleted episodes as played'), + 'settings_personalisation_divider_label': + MessageLookupByLibrary.simpleMessage('Personalisation'), + 'settings_playback_divider_label': + MessageLookupByLibrary.simpleMessage('Playback'), + 'settings_theme_switch_label': + MessageLookupByLibrary.simpleMessage('Dark theme'), + 'show_notes_label': MessageLookupByLibrary.simpleMessage('Show notes'), + 'sleep_episode_label': + MessageLookupByLibrary.simpleMessage('End of episode'), + 'sleep_minute_label': m0, + 'sleep_off_label': MessageLookupByLibrary.simpleMessage('Off'), + 'sleep_timer_label': + MessageLookupByLibrary.simpleMessage('Sleep Timer'), + 'stop_download_button_label': + MessageLookupByLibrary.simpleMessage('Stop'), + 'stop_download_confirmation': MessageLookupByLibrary.simpleMessage( + 'Are you sure you wish to stop this download and delete the episode?'), + 'stop_download_title': + MessageLookupByLibrary.simpleMessage('Stop Download'), + 'subscribe_button_label': + MessageLookupByLibrary.simpleMessage('Follow'), + 'subscribe_label': MessageLookupByLibrary.simpleMessage('Follow'), + 'transcript_label': MessageLookupByLibrary.simpleMessage('Transcript'), + 'transcript_why_not_label': + 
MessageLookupByLibrary.simpleMessage('Why not?'), + 'transcript_why_not_url': MessageLookupByLibrary.simpleMessage( + 'https://www.pinepods.online/docs/Features/Transcript'), + 'unsubscribe_button_label': + MessageLookupByLibrary.simpleMessage('Unfollow'), + 'unsubscribe_label': MessageLookupByLibrary.simpleMessage('Unfollow'), + 'unsubscribe_message': MessageLookupByLibrary.simpleMessage( + 'Unfollowing will delete all downloaded episodes of this podcast.'), + 'up_next_queue_label': MessageLookupByLibrary.simpleMessage('Up Next') + }; } diff --git a/mobile/lib/main.dart b/mobile/lib/main.dart index 30e8b16f..93f15a5f 100644 --- a/mobile/lib/main.dart +++ b/mobile/lib/main.dart @@ -5,6 +5,7 @@ import 'dart:io'; import 'package:pinepods_mobile/services/settings/mobile_settings_service.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; import 'package:pinepods_mobile/ui/pinepods_podcast_app.dart'; import 'package:pinepods_mobile/ui/widgets/restart_widget.dart'; import 'package:device_info_plus/device_info_plus.dart'; @@ -18,14 +19,43 @@ void main() async { WidgetsFlutterBinding.ensureInitialized(); SystemChrome.setSystemUIOverlayStyle(const SystemUiOverlayStyle(statusBarColor: Colors.transparent)); + // Initialize app logger + final appLogger = AppLogger(); + await appLogger.initialize(); + Logger.root.level = Level.FINE; Logger.root.onRecord.listen((record) { print('${record.level.name}: - ${record.time}: ${record.loggerName}: ${record.message}'); + + // Also log to our app logger + LogLevel appLogLevel; + switch (record.level.name) { + case 'SEVERE': + appLogLevel = LogLevel.critical; + break; + case 'WARNING': + appLogLevel = LogLevel.warning; + break; + case 'INFO': + appLogLevel = LogLevel.info; + break; + case 'FINE': + case 'FINER': + case 'FINEST': + appLogLevel = LogLevel.debug; + break; + default: + appLogLevel = LogLevel.info; + break; + } + + appLogger.log(appLogLevel, record.loggerName, record.message); }); var 
mobileSettingsService = (await MobileSettingsService.instance())!; certificateAuthorityBytes = await setupCertificateAuthority(); + runApp(RestartWidget( child: PinepodsPodcastApp( @@ -66,3 +96,4 @@ Future> setupCertificateAuthority() async { return ca; } + diff --git a/mobile/lib/repository/sembast/sembast_repository.dart b/mobile/lib/repository/sembast/sembast_repository.dart index a99a2e5b..0defaba1 100644 --- a/mobile/lib/repository/sembast/sembast_repository.dart +++ b/mobile/lib/repository/sembast/sembast_repository.dart @@ -36,7 +36,7 @@ class SembastRepository extends Repository { SembastRepository({ bool cleanup = true, - String databaseName = 'anytime.db', + String databaseName = 'pinepods.db', }) { _databaseService = DatabaseService(databaseName, version: 2, upgraderCallback: dbUpgrader); diff --git a/mobile/lib/services/audio/default_audio_player_service.dart b/mobile/lib/services/audio/default_audio_player_service.dart index 26f573ba..70abce86 100644 --- a/mobile/lib/services/audio/default_audio_player_service.dart +++ b/mobile/lib/services/audio/default_audio_player_service.dart @@ -134,12 +134,13 @@ class DefaultAudioPlayerService extends AudioPlayerService { /// Set the PinepodsAudioService reference for listen duration tracking void setPinepodsAudioService(PinepodsAudioService? 
service) { _pinepodsAudioService = service; + log.info('PinepodsAudioService reference set for enhanced sync capabilities'); } - /// Save episode position locally (every 5 seconds) + /// Save episode position locally (every 3 seconds for more frequent updates) void _startLocalPositionSaver() { _localPositionTimer?.cancel(); - _localPositionTimer = Timer.periodic(const Duration(seconds: 5), (_) { + _localPositionTimer = Timer.periodic(const Duration(seconds: 3), (_) { _saveLocalPosition(); }); } @@ -168,13 +169,25 @@ class DefaultAudioPlayerService extends AudioPlayerService { // Get local position final localPosition = episode.position; - // Get server position if we have PinePods service + // Get server position if we have PinePods service and episode is from PinePods int serverPosition = 0; - if (_pinepodsAudioService != null) { + if (_pinepodsAudioService != null && episode.guid.startsWith('pinepods_')) { try { - // This would need to be implemented in PinepodsService - // For now, we'll use local position - serverPosition = localPosition; + // Extract episode ID from GUID (format: 'pinepods_123') + final episodeIdStr = episode.guid.replaceFirst('pinepods_', '').split('_').first; + final episodeId = int.tryParse(episodeIdStr); + + if (episodeId != null) { + final serverPos = await _pinepodsAudioService!.getServerPositionForEpisode( + episodeId, + settingsService.pinepodsUserId ?? 0, + episode.pguid?.contains('youtube') ?? false, + ); + + if (serverPos != null) { + serverPosition = (serverPos * 1000).round(); // Convert to milliseconds + } + } } catch (e) { log.warning('Failed to get server position: $e'); } @@ -182,7 +195,6 @@ class DefaultAudioPlayerService extends AudioPlayerService { // Return the furthest position final bestPosition = localPosition > serverPosition ? 
localPosition : serverPosition; - log.info('Best position for ${episode.title}: ${bestPosition}ms (local: ${localPosition}ms, server: ${serverPosition}ms)'); return bestPosition; } @@ -202,15 +214,34 @@ class DefaultAudioPlayerService extends AudioPlayerService { @override Future pause() async { - // Record listen duration before pausing - await _recordListenDuration(); + // Pause immediately - don't wait for server sync + await _audioHandler.pause(); // Stop local position saver while paused _stopLocalPositionSaver(); - await _audioHandler.pause(); + log.info('Episode paused - starting background sync'); - log.info('Episode paused - listen duration recorded'); + // Do server sync in background without blocking pause + _performBackgroundSync(); + } + + /// Perform server sync in background without blocking user actions + void _performBackgroundSync() async { + try { + // Record listen duration + await _recordListenDuration(); + log.info('Listen duration recorded successfully'); + + // Sync position to PinePods server + if (_pinepodsAudioService != null) { + await _pinepodsAudioService!.onPause(); + log.info('Position synced to server successfully'); + } + } catch (e) { + log.warning('Background sync failed (but pause still worked): $e'); + // Pause still succeeded even if sync failed - user experience is not affected + } } @override @@ -297,7 +328,7 @@ class DefaultAudioPlayerService extends AudioPlayerService { // Track episode start time for listen duration calculation _episodeStartTime = DateTime.now(); - // Start local position saving (every 5 seconds) + // Start local position saving (every 3 seconds for better accuracy) _startLocalPositionSaver(); log.info('Started episode tracking at ${_episodeStartTime}'); @@ -403,13 +434,18 @@ class DefaultAudioPlayerService extends AudioPlayerService { // Record listen duration before stopping await _recordListenDuration(); + // Sync position to PinePods server immediately + if (_pinepodsAudioService != null) { + await 
_pinepodsAudioService!.onStop(); + } + // Stop local position saver _stopLocalPositionSaver(); _currentEpisode = null; await _audioHandler.stop(); - log.info('Episode stopped - listen duration recorded'); + log.info('Episode stopped - listen duration recorded and synced to server'); } @override @@ -842,6 +878,8 @@ class DefaultAudioPlayerService extends AudioPlayerService { var sub = _currentEpisode!.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.json); sub ??= _currentEpisode!.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.subrip); + + sub ??= _currentEpisode!.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.html); if (sub != null) { _updateTranscriptState(state: TranscriptLoadingState()); @@ -1200,6 +1238,7 @@ class _DefaultAudioPlayerHandler extends BaseAudioHandler with SeekHandler { log.fine('pause() triggered - saving position'); await _savePosition(); await _player.pause(); + log.info('Audio handler pause completed - position saved'); } @override @@ -1210,6 +1249,7 @@ class _DefaultAudioPlayerHandler extends BaseAudioHandler with SeekHandler { await _savePosition(); await super.stop(); + log.info('Audio handler stop completed - position saved'); } @override diff --git a/mobile/lib/services/auth_notifier.dart b/mobile/lib/services/auth_notifier.dart new file mode 100644 index 00000000..6ee7de0c --- /dev/null +++ b/mobile/lib/services/auth_notifier.dart @@ -0,0 +1,18 @@ +import 'package:flutter/material.dart'; + +// Global authentication notifier for cross-context communication +class AuthNotifier { + static VoidCallback? _globalLoginSuccessCallback; + + static void setGlobalLoginSuccessCallback(VoidCallback? 
callback) { + _globalLoginSuccessCallback = callback; + } + + static void notifyLoginSuccess() { + _globalLoginSuccessCallback?.call(); + } + + static void clearGlobalLoginSuccessCallback() { + _globalLoginSuccessCallback = null; + } +} \ No newline at end of file diff --git a/mobile/lib/services/download/mobile_download_service.dart b/mobile/lib/services/download/mobile_download_service.dart index e008c6f6..70d0c1c3 100644 --- a/mobile/lib/services/download/mobile_download_service.dart +++ b/mobile/lib/services/download/mobile_download_service.dart @@ -62,6 +62,8 @@ class MobileDownloadService extends DownloadService { var sub = episode.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.json); sub ??= episode.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.subrip); + + sub ??= episode.transcriptUrls.firstWhereOrNull((element) => element.type == TranscriptFormat.html); if (sub != null) { var transcript = await podcastService.loadTranscriptByUrl(transcriptUrl: sub); diff --git a/mobile/lib/services/error_handling_service.dart b/mobile/lib/services/error_handling_service.dart new file mode 100644 index 00000000..377de924 --- /dev/null +++ b/mobile/lib/services/error_handling_service.dart @@ -0,0 +1,202 @@ +// lib/services/error_handling_service.dart +import 'dart:io'; +import 'package:http/http.dart' as http; + +/// Service for handling and categorizing errors, especially server connection issues +class ErrorHandlingService { + /// Checks if an error indicates a server connection issue + static bool isServerConnectionError(dynamic error) { + if (error == null) return false; + + final errorString = error.toString().toLowerCase(); + + // Network-related errors + if (error is SocketException) return true; + if (error is HttpException) return true; + if (error is http.ClientException) return true; + + // Check for common connection error patterns + final connectionErrorPatterns = [ + 'connection refused', + 
'connection timeout', + 'connection failed', + 'network is unreachable', + 'no route to host', + 'connection reset', + 'connection aborted', + 'host is unreachable', + 'server unavailable', + 'service unavailable', + 'bad gateway', + 'gateway timeout', + 'connection timed out', + 'failed host lookup', + 'no address associated with hostname', + 'network unreachable', + 'operation timed out', + 'handshake failure', + 'certificate verify failed', + 'ssl handshake failed', + 'unable to connect', + 'server closed the connection', + 'connection closed', + 'broken pipe', + 'no internet connection', + 'offline', + 'dns lookup failed', + 'name resolution failed', + ]; + + return connectionErrorPatterns.any((pattern) => errorString.contains(pattern)); + } + + /// Checks if an error indicates authentication/authorization issues + static bool isAuthenticationError(dynamic error) { + if (error == null) return false; + + final errorString = error.toString().toLowerCase(); + + final authErrorPatterns = [ + 'unauthorized', + 'authentication failed', + 'invalid credentials', + 'access denied', + 'forbidden', + 'token expired', + 'invalid token', + 'login required', + '401', + '403', + ]; + + return authErrorPatterns.any((pattern) => errorString.contains(pattern)); + } + + /// Checks if an error indicates server-side issues (5xx errors) + static bool isServerError(dynamic error) { + if (error == null) return false; + + final errorString = error.toString().toLowerCase(); + + final serverErrorPatterns = [ + 'internal server error', + 'server error', + 'service unavailable', + 'bad gateway', + 'gateway timeout', + '500', + '502', + '503', + '504', + '505', + ]; + + return serverErrorPatterns.any((pattern) => errorString.contains(pattern)); + } + + /// Gets a user-friendly error message based on the error type + static String getUserFriendlyErrorMessage(dynamic error) { + if (error == null) return 'An unknown error occurred'; + + if (isServerConnectionError(error)) { + return 'Unable to 
connect to the PinePods server. Please check your internet connection and server settings.'; + } + + if (isAuthenticationError(error)) { + return 'Authentication failed. Please check your login credentials.'; + } + + if (isServerError(error)) { + return 'The PinePods server is experiencing issues. Please try again later.'; + } + + // Return the original error message for other types of errors + return error.toString(); + } + + /// Gets an appropriate title for the error + static String getErrorTitle(dynamic error) { + if (error == null) return 'Error'; + + if (isServerConnectionError(error)) { + return 'Server Unavailable'; + } + + if (isAuthenticationError(error)) { + return 'Authentication Error'; + } + + if (isServerError(error)) { + return 'Server Error'; + } + + return 'Error'; + } + + /// Gets troubleshooting suggestions based on the error type + static List getTroubleshootingSteps(dynamic error) { + if (error == null) return ['Please try again later']; + + if (isServerConnectionError(error)) { + return [ + 'Check your internet connection', + 'Verify server URL in settings', + 'Ensure the PinePods server is running', + 'Check if the server port is accessible', + 'Contact your administrator if the issue persists', + ]; + } + + if (isAuthenticationError(error)) { + return [ + 'Check your username and password', + 'Ensure your account is still active', + 'Try logging out and logging back in', + 'Contact your administrator for help', + ]; + } + + if (isServerError(error)) { + return [ + 'Wait a few minutes and try again', + 'Check if the server is overloaded', + 'Contact your administrator', + 'Check server logs for more details', + ]; + } + + return [ + 'Try refreshing the page', + 'Restart the app if the issue persists', + 'Contact support for assistance', + ]; + } + + /// Wraps an async function call with error handling + static Future handleApiCall( + Future Function() apiCall, { + String? 
context, + }) async { + try { + return await apiCall(); + } catch (error) { + // Log the error with context if provided + if (context != null) { + print('API Error in $context: $error'); + } + + // Re-throw the error to be handled by the UI layer + rethrow; + } + } +} + +/// Extension to make error checking easier +extension ErrorTypeExtension on dynamic { + bool get isServerConnectionError => ErrorHandlingService.isServerConnectionError(this); + bool get isAuthenticationError => ErrorHandlingService.isAuthenticationError(this); + bool get isServerError => ErrorHandlingService.isServerError(this); + String get userFriendlyMessage => ErrorHandlingService.getUserFriendlyErrorMessage(this); + String get errorTitle => ErrorHandlingService.getErrorTitle(this); + List get troubleshootingSteps => ErrorHandlingService.getTroubleshootingSteps(this); +} \ No newline at end of file diff --git a/mobile/lib/services/global_services.dart b/mobile/lib/services/global_services.dart new file mode 100644 index 00000000..7e542388 --- /dev/null +++ b/mobile/lib/services/global_services.dart @@ -0,0 +1,35 @@ +// lib/services/global_services.dart +import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; + +/// Global service access point for the app +class GlobalServices { + static PinepodsAudioService? _pinepodsAudioService; + static PinepodsService? 
_pinepodsService; + + /// Set the global services (called from PinepodsPodcastApp) + static void initialize({ + required PinepodsAudioService pinepodsAudioService, + required PinepodsService pinepodsService, + }) { + _pinepodsAudioService = pinepodsAudioService; + _pinepodsService = pinepodsService; + } + + /// Update global service credentials (called when user logs in or settings change) + static void setCredentials(String server, String apiKey) { + _pinepodsService?.setCredentials(server, apiKey); + } + + /// Get the global PinepodsAudioService instance + static PinepodsAudioService? get pinepodsAudioService => _pinepodsAudioService; + + /// Get the global PinepodsService instance + static PinepodsService? get pinepodsService => _pinepodsService; + + /// Clear services (for testing or cleanup) + static void clear() { + _pinepodsAudioService = null; + _pinepodsService = null; + } +} \ No newline at end of file diff --git a/mobile/lib/services/logging/app_logger.dart b/mobile/lib/services/logging/app_logger.dart new file mode 100644 index 00000000..bcad2443 --- /dev/null +++ b/mobile/lib/services/logging/app_logger.dart @@ -0,0 +1,488 @@ +// lib/services/logging/app_logger.dart +import 'dart:io'; +import 'dart:collection'; +import 'dart:convert'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:device_info_plus/device_info_plus.dart'; +import 'package:package_info_plus/package_info_plus.dart'; +import 'package:path_provider/path_provider.dart'; +import 'package:path/path.dart' as path_helper; +import 'package:intl/intl.dart'; + +enum LogLevel { + debug, + info, + warning, + error, + critical, +} + +class LogEntry { + final DateTime timestamp; + final LogLevel level; + final String tag; + final String message; + final String? 
stackTrace; + + LogEntry({ + required this.timestamp, + required this.level, + required this.tag, + required this.message, + this.stackTrace, + }); + + String get levelString { + switch (level) { + case LogLevel.debug: + return 'DEBUG'; + case LogLevel.info: + return 'INFO'; + case LogLevel.warning: + return 'WARN'; + case LogLevel.error: + return 'ERROR'; + case LogLevel.critical: + return 'CRITICAL'; + } + } + + String get formattedMessage { + final timeStr = timestamp.toString().substring(0, 19); // Remove milliseconds for readability + var result = '[$timeStr] [$levelString] [$tag] $message'; + if (stackTrace != null && stackTrace!.isNotEmpty) { + result += '\nStackTrace: $stackTrace'; + } + return result; + } +} + +class DeviceInfo { + final String platform; + final String osVersion; + final String model; + final String manufacturer; + final String appVersion; + final String buildNumber; + + DeviceInfo({ + required this.platform, + required this.osVersion, + required this.model, + required this.manufacturer, + required this.appVersion, + required this.buildNumber, + }); + + String get formattedInfo { + return ''' +Device Information: +- Platform: $platform +- OS Version: $osVersion +- Model: $model +- Manufacturer: $manufacturer +- App Version: $appVersion +- Build Number: $buildNumber +'''; + } +} + +class AppLogger { + static final AppLogger _instance = AppLogger._internal(); + factory AppLogger() => _instance; + AppLogger._internal(); + + static const int maxLogEntries = 1000; // Keep last 1000 log entries in memory + static const int maxSessionFiles = 5; // Keep last 5 session log files + static const String crashLogFileName = 'pinepods_last_crash.txt'; + + final Queue _logs = Queue(); + DeviceInfo? _deviceInfo; + File? _currentSessionFile; + File? _crashLogFile; + Directory? _logsDirectory; + String? 
_sessionId; + bool _isInitialized = false; + + // Initialize the logger and collect device info + Future initialize() async { + if (_isInitialized) return; + + await _collectDeviceInfo(); + await _initializeLogFiles(); + await _setupCrashHandler(); + await _loadPreviousCrash(); + + _isInitialized = true; + + // Log initialization + log(LogLevel.info, 'AppLogger', 'Logger initialized successfully'); + } + + Future _collectDeviceInfo() async { + try { + final deviceInfoPlugin = DeviceInfoPlugin(); + final packageInfo = await PackageInfo.fromPlatform(); + + if (Platform.isAndroid) { + final androidInfo = await deviceInfoPlugin.androidInfo; + _deviceInfo = DeviceInfo( + platform: 'Android', + osVersion: 'Android ${androidInfo.version.release} (API ${androidInfo.version.sdkInt})', + model: '${androidInfo.manufacturer} ${androidInfo.model}', + manufacturer: androidInfo.manufacturer, + appVersion: packageInfo.version, + buildNumber: packageInfo.buildNumber, + ); + } else if (Platform.isIOS) { + final iosInfo = await deviceInfoPlugin.iosInfo; + _deviceInfo = DeviceInfo( + platform: 'iOS', + osVersion: '${iosInfo.systemName} ${iosInfo.systemVersion}', + model: iosInfo.model, + manufacturer: 'Apple', + appVersion: packageInfo.version, + buildNumber: packageInfo.buildNumber, + ); + } else { + _deviceInfo = DeviceInfo( + platform: Platform.operatingSystem, + osVersion: Platform.operatingSystemVersion, + model: 'Unknown', + manufacturer: 'Unknown', + appVersion: packageInfo.version, + buildNumber: packageInfo.buildNumber, + ); + } + } catch (e) { + // If device info collection fails, create a basic info object + try { + final packageInfo = await PackageInfo.fromPlatform(); + _deviceInfo = DeviceInfo( + platform: Platform.operatingSystem, + osVersion: Platform.operatingSystemVersion, + model: 'Unknown', + manufacturer: 'Unknown', + appVersion: packageInfo.version, + buildNumber: packageInfo.buildNumber, + ); + } catch (e2) { + _deviceInfo = DeviceInfo( + platform: 'Unknown', + 
osVersion: 'Unknown', + model: 'Unknown', + manufacturer: 'Unknown', + appVersion: 'Unknown', + buildNumber: 'Unknown', + ); + } + } + } + + void log(LogLevel level, String tag, String message, [String? stackTrace]) { + final entry = LogEntry( + timestamp: DateTime.now(), + level: level, + tag: tag, + message: message, + stackTrace: stackTrace, + ); + + _logs.add(entry); + + // Keep only the last maxLogEntries in memory + while (_logs.length > maxLogEntries) { + _logs.removeFirst(); + } + + // Write to current session file asynchronously (don't await to avoid blocking) + _writeToSessionFile(entry); + + // Also print to console in debug mode + if (kDebugMode) { + print(entry.formattedMessage); + } + } + + // Convenience methods for different log levels + void debug(String tag, String message) => log(LogLevel.debug, tag, message); + void info(String tag, String message) => log(LogLevel.info, tag, message); + void warning(String tag, String message) => log(LogLevel.warning, tag, message); + void error(String tag, String message, [String? stackTrace]) => log(LogLevel.error, tag, message, stackTrace); + void critical(String tag, String message, [String? stackTrace]) => log(LogLevel.critical, tag, message, stackTrace); + + // Log an exception with automatic stack trace + void logException(String tag, String message, dynamic exception, [StackTrace? stackTrace]) { + final stackTraceStr = stackTrace?.toString() ?? 
exception.toString(); + error(tag, '$message: $exception', stackTraceStr); + } + + // Get all logs + List get logs => _logs.toList(); + + // Get logs filtered by level + List getLogsByLevel(LogLevel level) { + return _logs.where((log) => log.level == level).toList(); + } + + // Get logs from a specific time period + List getLogsInTimeRange(DateTime start, DateTime end) { + return _logs.where((log) => + log.timestamp.isAfter(start) && log.timestamp.isBefore(end) + ).toList(); + } + + // Get formatted log string for copying + String getFormattedLogs() { + final buffer = StringBuffer(); + + // Add device info + if (_deviceInfo != null) { + buffer.writeln(_deviceInfo!.formattedInfo); + } + + // Add separator + buffer.writeln('=' * 50); + buffer.writeln('Application Logs:'); + buffer.writeln('=' * 50); + + // Add all logs + for (final log in _logs) { + buffer.writeln(log.formattedMessage); + } + + // Add footer + buffer.writeln(); + buffer.writeln('=' * 50); + buffer.writeln('End of logs - Total entries: ${_logs.length}'); + buffer.writeln('Bug reports: https://github.com/madeofpendletonwool/pinepods/issues'); + + return buffer.toString(); + } + + // Clear all logs + void clearLogs() { + _logs.clear(); + log(LogLevel.info, 'AppLogger', 'Logs cleared by user'); + } + + // Initialize log files and directory structure + Future _initializeLogFiles() async { + try { + final appDocDir = await getApplicationDocumentsDirectory(); + _logsDirectory = Directory(path_helper.join(appDocDir.path, 'logs')); + + // Create logs directory if it doesn't exist + if (!await _logsDirectory!.exists()) { + await _logsDirectory!.create(recursive: true); + } + + // Clean up old session files (keep only last 5) + await _cleanupOldSessionFiles(); + + // Create new session file + _sessionId = DateFormat('yyyyMMdd_HHmmss').format(DateTime.now()); + _currentSessionFile = File(path_helper.join(_logsDirectory!.path, 'session_$_sessionId.log')); + await _currentSessionFile!.create(); + + // Initialize 
crash log file + _crashLogFile = File(path_helper.join(_logsDirectory!.path, crashLogFileName)); + if (!await _crashLogFile!.exists()) { + await _crashLogFile!.create(); + } + + log(LogLevel.info, 'AppLogger', 'Session log files initialized at ${_logsDirectory!.path}'); + } catch (e) { + if (kDebugMode) { + print('Failed to initialize log files: $e'); + } + } + } + + // Clean up old session files, keeping only the most recent ones + Future _cleanupOldSessionFiles() async { + try { + final files = await _logsDirectory!.list().toList(); + final sessionFiles = files + .whereType() + .where((f) => path_helper.basename(f.path).startsWith('session_')) + .toList(); + + // Sort by last modified date (newest first) + sessionFiles.sort((a, b) => b.lastModifiedSync().compareTo(a.lastModifiedSync())); + + // Delete files beyond the limit + if (sessionFiles.length > maxSessionFiles) { + for (int i = maxSessionFiles; i < sessionFiles.length; i++) { + await sessionFiles[i].delete(); + } + } + } catch (e) { + if (kDebugMode) { + print('Failed to cleanup old session files: $e'); + } + } + } + + // Write log entry to current session file + Future _writeToSessionFile(LogEntry entry) async { + if (_currentSessionFile == null) return; + + try { + await _currentSessionFile!.writeAsString( + '${entry.formattedMessage}\n', + mode: FileMode.append, + ); + } catch (e) { + // Silently fail to avoid logging loops + if (kDebugMode) { + print('Failed to write log to session file: $e'); + } + } + } + + // Setup crash handler + Future _setupCrashHandler() async { + FlutterError.onError = (FlutterErrorDetails details) { + _logCrash('Flutter Error', details.exception.toString(), details.stack); + // Still call the default error handler + FlutterError.presentError(details); + }; + + PlatformDispatcher.instance.onError = (error, stack) { + _logCrash('Platform Error', error.toString(), stack); + return true; // Mark as handled + }; + } + + // Log crash to persistent storage + Future _logCrash(String 
type, String error, StackTrace? stackTrace) async { + try { + final crashInfo = { + 'timestamp': DateTime.now().toIso8601String(), + 'sessionId': _sessionId, + 'type': type, + 'error': error, + 'stackTrace': stackTrace?.toString(), + 'deviceInfo': _deviceInfo?.formattedInfo, + 'recentLogs': _logs.length > 20 ? _logs.skip(_logs.length - 20).map((e) => e.formattedMessage).toList() : _logs.map((e) => e.formattedMessage).toList(), // Only last 20 entries + }; + + if (_crashLogFile != null) { + await _crashLogFile!.writeAsString(jsonEncode(crashInfo)); + } + + // Also log through normal logging + critical('CrashHandler', '$type: $error', stackTrace?.toString()); + } catch (e) { + if (kDebugMode) { + print('Failed to log crash: $e'); + } + } + } + + // Load and log previous crash if exists + Future _loadPreviousCrash() async { + if (_crashLogFile == null || !await _crashLogFile!.exists()) return; + + try { + final crashData = await _crashLogFile!.readAsString(); + if (crashData.isNotEmpty) { + final crash = jsonDecode(crashData); + warning('PreviousCrash', 'Previous crash detected: ${crash['type']} at ${crash['timestamp']}'); + warning('PreviousCrash', 'Session: ${crash['sessionId'] ?? 
'unknown'}'); + warning('PreviousCrash', 'Error: ${crash['error']}'); + if (crash['stackTrace'] != null) { + warning('PreviousCrash', 'Stack trace available in crash log file'); + } + } + } catch (e) { + warning('AppLogger', 'Failed to load previous crash info: $e'); + } + } + + // Get list of available session files + Future> getSessionFiles() async { + if (_logsDirectory == null) return []; + + try { + final files = await _logsDirectory!.list().toList(); + final sessionFiles = files + .whereType() + .where((f) => path_helper.basename(f.path).startsWith('session_')) + .toList(); + + // Sort by last modified date (newest first) + sessionFiles.sort((a, b) => b.lastModifiedSync().compareTo(a.lastModifiedSync())); + return sessionFiles; + } catch (e) { + return []; + } + } + + // Get current session file path + String? get currentSessionPath => _currentSessionFile?.path; + + // Get crash log file path + String? get crashLogPath => _crashLogFile?.path; + + // Get logs directory path + String? 
get logsDirectoryPath => _logsDirectory?.path; + + // Check if previous crash exists + Future hasPreviousCrash() async { + if (_crashLogFile == null) return false; + try { + final exists = await _crashLogFile!.exists(); + if (!exists) return false; + final content = await _crashLogFile!.readAsString(); + return content.isNotEmpty; + } catch (e) { + return false; + } + } + + // Clear crash log + Future clearCrashLog() async { + if (_crashLogFile != null && await _crashLogFile!.exists()) { + await _crashLogFile!.writeAsString(''); + } + } + + // Get formatted logs with session info + String getFormattedLogsWithSessionInfo() { + final buffer = StringBuffer(); + + // Add session info + buffer.writeln('Session ID: $_sessionId'); + buffer.writeln('Session started: ${DateTime.now().toString()}'); + + // Add device info + if (_deviceInfo != null) { + buffer.writeln(_deviceInfo!.formattedInfo); + } + + // Add separator + buffer.writeln('=' * 50); + buffer.writeln('Application Logs (Current Session):'); + buffer.writeln('=' * 50); + + // Add all logs + for (final log in _logs) { + buffer.writeln(log.formattedMessage); + } + + // Add footer + buffer.writeln(); + buffer.writeln('=' * 50); + buffer.writeln('End of logs - Total entries: ${_logs.length}'); + buffer.writeln('Session file: ${_currentSessionFile?.path}'); + buffer.writeln('Bug reports: https://github.com/madeofpendletonwool/pinepods/issues'); + + return buffer.toString(); + } + + // Get device info + DeviceInfo? 
get deviceInfo => _deviceInfo; +} \ No newline at end of file diff --git a/mobile/lib/services/pinepods/login_service.dart b/mobile/lib/services/pinepods/login_service.dart index fbf8434f..121ea1e7 100644 --- a/mobile/lib/services/pinepods/login_service.dart +++ b/mobile/lib/services/pinepods/login_service.dart @@ -25,8 +25,8 @@ class PinepodsLoginService { } } - /// Get API key using Basic authentication - static Future getApiKey(String serverUrl, String username, String password) async { + /// Initial login - returns either API key or MFA session info + static Future initialLogin(String serverUrl, String username, String password) async { try { final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); final credentials = base64Encode(utf8.encode('$username:$password')); @@ -43,14 +43,37 @@ class PinepodsLoginService { if (response.statusCode == 200) { final data = jsonDecode(response.body); - return data['retrieved_key']; + + // Check if MFA is required + if (data['status'] == 'mfa_required' && data['mfa_required'] == true) { + return InitialLoginResponse.mfaRequired( + serverUrl: normalizedUrl, + username: username, + userId: data['user_id'], + mfaSessionToken: data['mfa_session_token'], + ); + } + + // Normal flow - no MFA required + final apiKey = data['retrieved_key']; + if (apiKey != null) { + return InitialLoginResponse.success(apiKey: apiKey); + } } - return null; + + return InitialLoginResponse.failure('Authentication failed'); } catch (e) { - return null; + return InitialLoginResponse.failure('Error: ${e.toString()}'); } } + /// Legacy method for backwards compatibility + @deprecated + static Future getApiKey(String serverUrl, String username, String password) async { + final result = await initialLogin(serverUrl, username, password); + return result.isSuccess ? 
result.apiKey : null; + } + /// Verify API key is valid static Future verifyApiKey(String serverUrl, String apiKey) async { try { @@ -173,7 +196,40 @@ class PinepodsLoginService { } } - /// Verify MFA code + /// Verify MFA code and get API key during login (secure flow) + static Future verifyMfaAndGetKey(String serverUrl, String mfaSessionToken, String mfaCode) async { + try { + final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); + final url = Uri.parse('$normalizedUrl/api/data/verify_mfa_and_get_key'); + + final requestBody = jsonEncode({ + 'mfa_session_token': mfaSessionToken, + 'mfa_code': mfaCode, + }); + + final response = await http.post( + url, + headers: { + 'Content-Type': 'application/json', + 'User-Agent': userAgent, + }, + body: requestBody, + ); + + if (response.statusCode == 200) { + final data = jsonDecode(response.body); + if (data['verified'] == true && data['status'] == 'success') { + return data['retrieved_key']; + } + } + return null; + } catch (e) { + return null; + } + } + + /// Legacy MFA verification (for post-login MFA checks) + @deprecated static Future verifyMfa(String serverUrl, String apiKey, int userId, String mfaCode) async { try { final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); @@ -204,8 +260,8 @@ class PinepodsLoginService { } } - /// Complete login flow - static Future login(String serverUrl, String username, String password, {String? mfaCode}) async { + /// Complete login flow (new secure MFA implementation) + static Future login(String serverUrl, String username, String password) async { try { // Step 1: Verify server final isPinepods = await verifyPinepodsInstance(serverUrl); @@ -213,60 +269,143 @@ class PinepodsLoginService { return LoginResult.failure('Not a valid PinePods server'); } - // Step 2: Get API key - final apiKey = await getApiKey(serverUrl, username, password); - if (apiKey == null) { - return LoginResult.failure('Login failed. 
Check your credentials.'); - } - - // Step 3: Verify API key - final isValidKey = await verifyApiKey(serverUrl, apiKey); - if (!isValidKey) { - return LoginResult.failure('API key verification failed'); - } - - // Step 4: Get user ID - final userId = await getUserId(serverUrl, apiKey); - if (userId == null) { - return LoginResult.failure('Failed to get user ID'); + // Step 2: Initial login - get API key or MFA session + final initialResult = await initialLogin(serverUrl, username, password); + + if (!initialResult.isSuccess) { + return LoginResult.failure(initialResult.errorMessage ?? 'Login failed'); } - - // Step 5: Check MFA - final mfaEnabled = await checkMfaEnabled(serverUrl, apiKey, userId); - if (mfaEnabled) { - if (mfaCode == null || mfaCode.isEmpty) { - return LoginResult.mfaRequired(serverUrl, apiKey, userId); - } - - final mfaValid = await verifyMfa(serverUrl, apiKey, userId, mfaCode); - if (!mfaValid) { - return LoginResult.failure('Invalid MFA code'); - } + + if (initialResult.requiresMfa) { + // MFA required - return MFA prompt state + return LoginResult.mfaRequired( + serverUrl: initialResult.serverUrl!, + username: username, + userId: initialResult.userId!, + mfaSessionToken: initialResult.mfaSessionToken!, + ); } - // Step 6: Get user details - final userDetails = await getUserDetails(serverUrl, apiKey, userId); - if (userDetails == null) { - return LoginResult.failure('Failed to get user details'); - } + // No MFA required - complete login with API key + return await _completeLoginWithApiKey( + serverUrl, + username, + initialResult.apiKey!, + ); + } catch (e) { + return LoginResult.failure('Error: ${e.toString()}'); + } + } - // Step 7: Get API configuration - final apiConfig = await getApiConfig(serverUrl, apiKey); - if (apiConfig == null) { - return LoginResult.failure('Failed to get server configuration'); + /// Complete MFA login flow + static Future completeMfaLogin({ + required String serverUrl, + required String username, + required String 
mfaSessionToken, + required String mfaCode, + }) async { + try { + // Verify MFA and get API key + final apiKey = await verifyMfaAndGetKey(serverUrl, mfaSessionToken, mfaCode); + if (apiKey == null) { + return LoginResult.failure('Invalid MFA code'); } - return LoginResult.success( - serverUrl: serverUrl, - apiKey: apiKey, - userId: userId, - userDetails: userDetails, - apiConfig: apiConfig, - ); + // Complete login with verified API key + return await _completeLoginWithApiKey(serverUrl, username, apiKey); } catch (e) { return LoginResult.failure('Error: ${e.toString()}'); } } + + /// Complete login flow with API key (common logic) + static Future _completeLoginWithApiKey(String serverUrl, String username, String apiKey) async { + // Step 1: Verify API key + final isValidKey = await verifyApiKey(serverUrl, apiKey); + if (!isValidKey) { + return LoginResult.failure('API key verification failed'); + } + + // Step 2: Get user ID + final userId = await getUserId(serverUrl, apiKey); + if (userId == null) { + return LoginResult.failure('Failed to get user ID'); + } + + // Step 3: Get user details + final userDetails = await getUserDetails(serverUrl, apiKey, userId); + if (userDetails == null) { + return LoginResult.failure('Failed to get user details'); + } + + // Step 4: Get API configuration + final apiConfig = await getApiConfig(serverUrl, apiKey); + if (apiConfig == null) { + return LoginResult.failure('Failed to get server configuration'); + } + + return LoginResult.success( + serverUrl: serverUrl, + apiKey: apiKey, + userId: userId, + userDetails: userDetails, + apiConfig: apiConfig, + ); + } +} + +class InitialLoginResponse { + final bool isSuccess; + final bool requiresMfa; + final String? errorMessage; + final String? apiKey; + final String? serverUrl; + final String? username; + final int? userId; + final String? 
mfaSessionToken; + + InitialLoginResponse._({ + required this.isSuccess, + required this.requiresMfa, + this.errorMessage, + this.apiKey, + this.serverUrl, + this.username, + this.userId, + this.mfaSessionToken, + }); + + factory InitialLoginResponse.success({required String apiKey}) { + return InitialLoginResponse._( + isSuccess: true, + requiresMfa: false, + apiKey: apiKey, + ); + } + + factory InitialLoginResponse.mfaRequired({ + required String serverUrl, + required String username, + required int userId, + required String mfaSessionToken, + }) { + return InitialLoginResponse._( + isSuccess: true, + requiresMfa: true, + serverUrl: serverUrl, + username: username, + userId: userId, + mfaSessionToken: mfaSessionToken, + ); + } + + factory InitialLoginResponse.failure(String errorMessage) { + return InitialLoginResponse._( + isSuccess: false, + requiresMfa: false, + errorMessage: errorMessage, + ); + } } class LoginResult { @@ -275,7 +414,9 @@ class LoginResult { final String? errorMessage; final String? serverUrl; final String? apiKey; + final String? username; final int? userId; + final String? mfaSessionToken; final UserDetails? userDetails; final ApiConfig? 
apiConfig; @@ -285,7 +426,9 @@ class LoginResult { this.errorMessage, this.serverUrl, this.apiKey, + this.username, this.userId, + this.mfaSessionToken, this.userDetails, this.apiConfig, }); @@ -316,13 +459,19 @@ class LoginResult { ); } - factory LoginResult.mfaRequired(String serverUrl, String apiKey, int userId) { + factory LoginResult.mfaRequired({ + required String serverUrl, + required String username, + required int userId, + required String mfaSessionToken, + }) { return LoginResult._( isSuccess: false, requiresMfa: true, serverUrl: serverUrl, - apiKey: apiKey, + username: username, userId: userId, + mfaSessionToken: mfaSessionToken, ); } } diff --git a/mobile/lib/services/pinepods/oidc_service.dart b/mobile/lib/services/pinepods/oidc_service.dart new file mode 100644 index 00000000..b2bd5fce --- /dev/null +++ b/mobile/lib/services/pinepods/oidc_service.dart @@ -0,0 +1,405 @@ +import 'dart:convert'; +import 'dart:io'; +import 'dart:math'; +import 'package:crypto/crypto.dart'; +import 'package:http/http.dart' as http; +import 'package:url_launcher/url_launcher.dart'; + +class OidcService { + static const String userAgent = 'PinePods Mobile/1.0'; + static const String callbackUrlScheme = 'pinepods'; + static const String callbackPath = '/auth/callback'; + + /// Get available OIDC providers from server + static Future> getPublicProviders(String serverUrl) async { + try { + final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); + final url = Uri.parse('$normalizedUrl/api/data/public_oidc_providers'); + + final response = await http.get( + url, + headers: {'User-Agent': userAgent}, + ); + + if (response.statusCode == 200) { + final data = jsonDecode(response.body); + final providers = (data['providers'] as List) + .map((provider) => OidcProvider.fromJson(provider)) + .toList(); + return providers; + } + return []; + } catch (e) { + return []; + } + } + + /// Generate PKCE code verifier and challenge for secure OIDC flow + static OidcPkce 
generatePkce() { + final codeVerifier = _generateCodeVerifier(); + final codeChallenge = _generateCodeChallenge(codeVerifier); + + return OidcPkce( + codeVerifier: codeVerifier, + codeChallenge: codeChallenge, + ); + } + + /// Generate random state parameter + static String generateState() { + final random = Random.secure(); + final bytes = List.generate(32, (i) => random.nextInt(256)); + return base64UrlEncode(bytes).replaceAll('=', ''); + } + + /// Store OIDC state on server (matches web implementation) + static Future storeOidcState({ + required String serverUrl, + required String state, + required String clientId, + String? originUrl, + String? codeVerifier, + }) async { + try { + final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); + final url = Uri.parse('$normalizedUrl/api/auth/store_state'); + + final requestBody = jsonEncode({ + 'state': state, + 'client_id': clientId, + 'origin_url': originUrl, + 'code_verifier': codeVerifier, + }); + + final response = await http.post( + url, + headers: { + 'Content-Type': 'application/json', + 'User-Agent': userAgent, + }, + body: requestBody, + ); + + return response.statusCode == 200; + } catch (e) { + return false; + } + } + + /// Build authorization URL and return it for in-app browser use + static Future buildOidcLoginUrl({ + required OidcProvider provider, + required String serverUrl, + required String state, + OidcPkce? 
pkce, + }) async { + try { + // Store state on server first - use web origin for in-app browser + final stateStored = await storeOidcState( + serverUrl: serverUrl, + state: state, + clientId: provider.clientId, + originUrl: '$serverUrl/oauth/callback', // Use web callback for in-app browser + codeVerifier: pkce?.codeVerifier, // Include PKCE code verifier + ); + + if (!stateStored) { + return null; + } + + // Build authorization URL + final authUri = Uri.parse(provider.authorizationUrl); + final queryParams = { + 'client_id': provider.clientId, + 'response_type': 'code', + 'scope': provider.scope, + 'redirect_uri': '$serverUrl/api/auth/callback', + 'state': state, + }; + + // Add PKCE parameters if provided + if (pkce != null) { + queryParams['code_challenge'] = pkce.codeChallenge; + queryParams['code_challenge_method'] = 'S256'; + } + + final authUrl = authUri.replace(queryParameters: queryParams); + + return authUrl.toString(); + + } catch (e) { + return null; + } + } + + /// Extract API key from callback URL (for in-app browser) + static String? extractApiKeyFromUrl(String url) { + try { + final uri = Uri.parse(url); + + // Check if this is our callback URL with API key + if (uri.path.contains('/oauth/callback')) { + return uri.queryParameters['api_key']; + } + + return null; + } catch (e) { + return null; + } + } + + /// Handle OIDC callback and extract authentication result + static OidcCallbackResult parseCallback(String callbackUrl) { + try { + final uri = Uri.parse(callbackUrl); + final queryParams = uri.queryParameters; + + // Check for error + if (queryParams.containsKey('error')) { + return OidcCallbackResult.error( + error: queryParams['error'] ?? 
'Unknown error', + errorDescription: queryParams['error_description'], + ); + } + + // Check if we have an API key directly (PinePods backend provides this) + final apiKey = queryParams['api_key']; + if (apiKey != null && apiKey.isNotEmpty) { + return OidcCallbackResult.success( + apiKey: apiKey, + state: queryParams['state'], + ); + } + + // Fallback: Extract traditional OAuth code and state + final code = queryParams['code']; + final state = queryParams['state']; + + if (code != null && state != null) { + return OidcCallbackResult.success( + code: code, + state: state, + ); + } + + return OidcCallbackResult.error( + error: 'missing_parameters', + errorDescription: 'Neither API key nor authorization code found in callback', + ); + } catch (e) { + return OidcCallbackResult.error( + error: 'parse_error', + errorDescription: e.toString(), + ); + } + } + + /// Complete OIDC authentication by verifying with server + static Future completeAuthentication({ + required String serverUrl, + required String code, + required String state, + OidcPkce? pkce, + }) async { + try { + final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); + final url = Uri.parse('$normalizedUrl/api/auth/oidc_complete'); + + final requestBody = { + 'code': code, + 'state': state, + }; + + // Add PKCE verifier if provided + if (pkce != null) { + requestBody['code_verifier'] = pkce.codeVerifier; + } + + final response = await http.post( + url, + headers: { + 'Content-Type': 'application/json', + 'User-Agent': userAgent, + }, + body: jsonEncode(requestBody), + ); + + if (response.statusCode == 200) { + final data = jsonDecode(response.body); + return OidcAuthResult.success( + apiKey: data['api_key'], + userId: data['user_id'], + serverUrl: normalizedUrl, + ); + } else { + final errorData = jsonDecode(response.body); + return OidcAuthResult.failure( + errorData['error'] ?? 
'Authentication failed', + ); + } + } catch (e) { + return OidcAuthResult.failure('Network error: ${e.toString()}'); + } + } + + /// Generate secure random code verifier + static String _generateCodeVerifier() { + final random = Random.secure(); + // Generate 32 random bytes (256 bits) which will create a ~43 character base64url string + final bytes = List.generate(32, (i) => random.nextInt(256)); + // Use base64url encoding (- and _ instead of + and /) and remove padding + return base64UrlEncode(bytes).replaceAll('=', ''); + } + + /// Generate code challenge from verifier using SHA256 + static String _generateCodeChallenge(String codeVerifier) { + final bytes = utf8.encode(codeVerifier); + final digest = sha256.convert(bytes); + return base64UrlEncode(digest.bytes) + .replaceAll('=', '') + .replaceAll('+', '-') + .replaceAll('/', '_'); + } +} + +/// OIDC Provider model +class OidcProvider { + final int providerId; + final String providerName; + final String clientId; + final String authorizationUrl; + final String scope; + final String? buttonColor; + final String? buttonText; + final String? buttonTextColor; + final String? iconSvg; + + OidcProvider({ + required this.providerId, + required this.providerName, + required this.clientId, + required this.authorizationUrl, + required this.scope, + this.buttonColor, + this.buttonText, + this.buttonTextColor, + this.iconSvg, + }); + + factory OidcProvider.fromJson(Map json) { + return OidcProvider( + providerId: json['provider_id'], + providerName: json['provider_name'], + clientId: json['client_id'], + authorizationUrl: json['authorization_url'], + scope: json['scope'], + buttonColor: json['button_color'], + buttonText: json['button_text'], + buttonTextColor: json['button_text_color'], + iconSvg: json['icon_svg'], + ); + } + + /// Get display text for the provider button + String get displayText => buttonText ?? 'Login with $providerName'; + + /// Get button color or default + String get buttonColorHex => buttonColor ?? 
'#007bff'; + + /// Get button text color or default + String get buttonTextColorHex => buttonTextColor ?? '#ffffff'; +} + +/// PKCE (Proof Key for Code Exchange) parameters +class OidcPkce { + final String codeVerifier; + final String codeChallenge; + + OidcPkce({ + required this.codeVerifier, + required this.codeChallenge, + }); +} + +/// OIDC callback parsing result +class OidcCallbackResult { + final bool isSuccess; + final String? code; + final String? state; + final String? apiKey; + final String? error; + final String? errorDescription; + + OidcCallbackResult._({ + required this.isSuccess, + this.code, + this.state, + this.apiKey, + this.error, + this.errorDescription, + }); + + factory OidcCallbackResult.success({ + String? code, + String? state, + String? apiKey, + }) { + return OidcCallbackResult._( + isSuccess: true, + code: code, + state: state, + apiKey: apiKey, + ); + } + + factory OidcCallbackResult.error({ + required String error, + String? errorDescription, + }) { + return OidcCallbackResult._( + isSuccess: false, + error: error, + errorDescription: errorDescription, + ); + } + + bool get hasApiKey => apiKey != null && apiKey!.isNotEmpty; + bool get hasCode => code != null && code!.isNotEmpty; +} + +/// OIDC authentication completion result +class OidcAuthResult { + final bool isSuccess; + final String? apiKey; + final int? userId; + final String? serverUrl; + final String? 
errorMessage; + + OidcAuthResult._({ + required this.isSuccess, + this.apiKey, + this.userId, + this.serverUrl, + this.errorMessage, + }); + + factory OidcAuthResult.success({ + required String apiKey, + required int userId, + required String serverUrl, + }) { + return OidcAuthResult._( + isSuccess: true, + apiKey: apiKey, + userId: userId, + serverUrl: serverUrl, + ); + } + + factory OidcAuthResult.failure(String errorMessage) { + return OidcAuthResult._( + isSuccess: false, + errorMessage: errorMessage, + ); + } +} \ No newline at end of file diff --git a/mobile/lib/services/pinepods/pinepods_audio_service.dart b/mobile/lib/services/pinepods/pinepods_audio_service.dart index 62a82277..f3f56906 100644 --- a/mobile/lib/services/pinepods/pinepods_audio_service.dart +++ b/mobile/lib/services/pinepods/pinepods_audio_service.dart @@ -21,13 +21,21 @@ class PinepodsAudioService { Timer? _userStatsTimer; int? _currentEpisodeId; int? _currentUserId; + bool _isYoutube = false; double _lastRecordedPosition = 0; + + /// Callbacks for pause/stop events + Function()? _onPauseCallback; + Function()? _onStopCallback; PinepodsAudioService( this._audioPlayerService, this._pinepodsService, - this._settingsBloc, - ); + this._settingsBloc, { + Function()? onPauseCallback, + Function()? onStopCallback, + }) : _onPauseCallback = onPauseCallback, + _onStopCallback = onStopCallback; /// Play a PinePods episode with full server integration Future playPinepodsEpisode({ @@ -44,6 +52,7 @@ class PinepodsAudioService { } _currentUserId = userId; + _isYoutube = pinepodsEpisode.isYoutube; log.info('Starting PinePods episode playback: ${pinepodsEpisode.episodeTitle}'); @@ -91,10 +100,11 @@ class PinepodsAudioService { // Add to history log.info('Adding episode $episodeId to history for user $userId'); - await _pinepodsService.addHistory( + final initialPosition = resume ? (pinepodsEpisode.listenDuration ?? 0).toDouble() : 0.0; + await _pinepodsService.recordListenDuration( episodeId, - resume ? 
(pinepodsEpisode.listenDuration ?? 0).toDouble() : 0, userId, + initialPosition, // Send seconds like web app does pinepodsEpisode.isYoutube, ); @@ -124,22 +134,38 @@ class PinepodsAudioService { void _startPeriodicUpdates() { _stopPeriodicUpdates(); // Clean up any existing timers - // Episode position updates every 30 seconds + log.info('Starting periodic updates - episode position every 15s, user stats every 60s'); + + // Episode position updates every 15 seconds (more frequent for reliability) _episodeUpdateTimer = Timer.periodic( - const Duration(seconds: 30), - (_) => _updateEpisodePosition(), + const Duration(seconds: 15), + (_) => _safeUpdateEpisodePosition(), ); // User listen time updates every 60 seconds _userStatsTimer = Timer.periodic( const Duration(seconds: 60), - (_) => _updateUserListenTime(), + (_) => _safeUpdateUserListenTime(), ); } + /// Safely update episode position without affecting playback + void _safeUpdateEpisodePosition() async { + try { + await _updateEpisodePosition(); + } catch (e) { + log.warning('Periodic sync completely failed but playback continues: $e'); + // Completely isolate any network failures from affecting playback + } + } + /// Update episode position on server Future _updateEpisodePosition() async { - if (_currentEpisodeId == null || _currentUserId == null) return; + // Updating episode position + if (_currentEpisodeId == null || _currentUserId == null) { + log.warning('Skipping scheduled sync - missing episode ID ($_currentEpisodeId) or user ID ($_currentUserId)'); + return; + } try { final positionState = _audioPlayerService.playPosition?.value; @@ -147,35 +173,118 @@ class PinepodsAudioService { final currentPosition = positionState.position.inSeconds.toDouble(); - // Only update if position has changed significantly - if ((currentPosition - _lastRecordedPosition).abs() > 5) { - await _pinepodsService.addHistory( + // Only update if position has changed by more than 2 seconds (more responsive) + if ((currentPosition 
- _lastRecordedPosition).abs() > 2) { + // Convert seconds to minutes for the API + final currentPositionMinutes = currentPosition / 60.0; + // Position changed, syncing to server + + await _pinepodsService.recordListenDuration( _currentEpisodeId!, - currentPosition, _currentUserId!, - false, // Assume not YouTube for now + currentPosition, // Send seconds like web app does + _isYoutube, ); _lastRecordedPosition = currentPosition; - log.fine('Updated episode position: ${currentPosition}s'); + // Sync completed successfully } } catch (e) { log.warning('Failed to update episode position: $e'); } } + /// Safely update user listen time without affecting playback + void _safeUpdateUserListenTime() async { + try { + await _updateUserListenTime(); + } catch (e) { + log.warning('User stats sync completely failed but playback continues: $e'); + // Completely isolate any network failures from affecting playback + } + } + /// Update user listen time statistics Future _updateUserListenTime() async { if (_currentUserId == null) return; try { await _pinepodsService.incrementListenTime(_currentUserId!); - log.fine('Updated user listen time'); + // User listen time updated } catch (e) { log.warning('Failed to update user listen time: $e'); } } + /// Sync current position to server immediately (for pause/stop events) + Future syncCurrentPositionToServer() async { + // Syncing current position to server + + if (_currentEpisodeId == null || _currentUserId == null) { + log.warning('Cannot sync - missing episode ID ($_currentEpisodeId) or user ID ($_currentUserId)'); + return; + } + + try { + final positionState = _audioPlayerService.playPosition?.value; + if (positionState == null) { + log.warning('Cannot sync - positionState is null'); + return; + } + + final currentPosition = positionState.position.inSeconds.toDouble(); + + log.info('Syncing position to server: ${currentPosition}s for episode $_currentEpisodeId'); + + await _pinepodsService.recordListenDuration( + 
_currentEpisodeId!, + _currentUserId!, + currentPosition, // Send seconds like web app does + _isYoutube, + ); + + _lastRecordedPosition = currentPosition; + log.info('Successfully synced position to server: ${currentPosition}s'); + } catch (e) { + log.warning('Failed to sync position to server: $e'); + log.warning('Stack trace: ${StackTrace.current}'); + } + } + + /// Get server position for current episode + Future getServerPosition() async { + if (_currentEpisodeId == null || _currentUserId == null) return null; + + try { + final episodeMetadata = await _pinepodsService.getEpisodeMetadata( + _currentEpisodeId!, + _currentUserId!, + isYoutube: _isYoutube, + ); + + return episodeMetadata?.listenDuration?.toDouble(); + } catch (e) { + log.warning('Failed to get server position: $e'); + return null; + } + } + + /// Get server position for any episode + Future getServerPositionForEpisode(int episodeId, int userId, bool isYoutube) async { + try { + final episodeMetadata = await _pinepodsService.getEpisodeMetadata( + episodeId, + userId, + isYoutube: isYoutube, + ); + + return episodeMetadata?.listenDuration?.toDouble(); + } catch (e) { + log.warning('Failed to get server position for episode $episodeId: $e'); + return null; + } + } + /// Record listen duration when episode ends or is stopped Future recordListenDuration(double listenDuration) async { if (_currentEpisodeId == null || _currentUserId == null) return; @@ -185,7 +294,7 @@ class PinepodsAudioService { _currentEpisodeId!, _currentUserId!, listenDuration, - false, // Assume not YouTube for now + _isYoutube, ); log.info('Recorded listen duration: ${listenDuration}s'); } catch (e) { @@ -193,6 +302,28 @@ class PinepodsAudioService { } } + /// Handle pause event - sync position to server + Future onPause() async { + try { + await syncCurrentPositionToServer(); + log.info('Pause event handled - position synced to server'); + } catch (e) { + log.warning('Pause sync failed but pause succeeded: $e'); + } + 
_onPauseCallback?.call(); + } + + /// Handle stop event - sync position to server + Future onStop() async { + try { + await syncCurrentPositionToServer(); + log.info('Stop event handled - position synced to server'); + } catch (e) { + log.warning('Stop sync failed but stop succeeded: $e'); + } + _onStopCallback?.call(); + } + /// Stop periodic updates void _stopPeriodicUpdates() { _episodeUpdateTimer?.cancel(); @@ -289,20 +420,25 @@ class PinepodsAudioService { final mimeType = transcriptData['mime_type'] ?? ''; final type = transcriptData['type'] ?? ''; - log.info('Processing transcript: url=$url, mimeType=$mimeType, type=$type'); + // Processing transcript if (url.toLowerCase().contains('.json') || mimeType.toLowerCase().contains('json') || type.toLowerCase().contains('json')) { format = TranscriptFormat.json; - log.info('Detected JSON transcript format'); + // Detected JSON transcript } else if (url.toLowerCase().contains('.srt') || mimeType.toLowerCase().contains('srt') || type.toLowerCase().contains('srt') || type.toLowerCase().contains('subrip') || url.toLowerCase().contains('subrip')) { format = TranscriptFormat.subrip; - log.info('Detected SubRip transcript format'); + // Detected SubRip transcript + } else if (url.toLowerCase().contains('transcript') || + mimeType.toLowerCase().contains('html') || + type.toLowerCase().contains('html')) { + format = TranscriptFormat.html; + // Detected HTML transcript } else { log.warning('Transcript format not recognized: mimeType=$mimeType, type=$type'); } diff --git a/mobile/lib/services/pinepods/pinepods_service.dart b/mobile/lib/services/pinepods/pinepods_service.dart index b7ccbf07..e5862bb0 100644 --- a/mobile/lib/services/pinepods/pinepods_service.dart +++ b/mobile/lib/services/pinepods/pinepods_service.dart @@ -79,10 +79,7 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/verify_key'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await 
http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); @@ -105,10 +102,7 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/podcasts'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body) as List; @@ -131,45 +125,45 @@ class PinepodsService { print('Making API call to: $url'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Get user podcasts response: ${response.statusCode} - ${response.body}'); + // User podcasts API response received if (response.statusCode == 200) { final data = jsonDecode(response.body); final List podsData = data['pods'] ?? []; - + List podcasts = []; for (var podData in podsData) { // Use episode count from server response final episodeCount = podData['episodecount'] ?? 0; - + // Create placeholder episodes to represent the count - final placeholderEpisodes = List.generate(episodeCount, (index) => - Episode( + final placeholderEpisodes = List.generate( + episodeCount, + (index) => Episode( guid: 'placeholder_$index', podcast: podData['podcastname'] ?? '', title: 'Episode ${index + 1}', - ) + ), + ); + + podcasts.add( + Podcast( + id: podData['podcastid'], + title: podData['podcastname'] ?? '', + description: podData['description'] ?? '', + imageUrl: podData['artworkurl'] ?? '', + thumbImageUrl: podData['artworkurl'] ?? '', + url: podData['feedurl'] ?? '', + link: podData['websiteurl'] ?? '', + copyright: podData['author'] ?? '', + guid: podData['feedurl'] ?? '', + episodes: placeholderEpisodes, + ), ); - - podcasts.add(Podcast( - id: podData['podcastid'], - title: podData['podcastname'] ?? '', - description: podData['description'] ?? 
'', - imageUrl: podData['artworkurl'] ?? '', - thumbImageUrl: podData['artworkurl'] ?? '', - url: podData['feedurl'] ?? '', - link: podData['websiteurl'] ?? '', - copyright: podData['author'] ?? '', - guid: podData['feedurl'] ?? '', - episodes: placeholderEpisodes, - )); } - + return podcasts; } else { throw Exception('Failed to get user podcasts: ${response.statusCode}'); @@ -191,16 +185,13 @@ class PinepodsService { try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { final responseText = response.body; final data = jsonDecode(responseText); - + // Handle the response structure from the web implementation if (data is Map && data['episodes'] != null) { final episodesList = data['episodes'] as List; @@ -216,7 +207,9 @@ class PinepodsService { return []; } } else { - throw Exception('Failed to fetch recent episodes: ${response.statusCode} ${response.reasonPhrase}'); + throw Exception( + 'Failed to fetch recent episodes: ${response.statusCode} ${response.reasonPhrase}', + ); } } catch (e) { print('Error fetching recent episodes: $e'); @@ -237,13 +230,17 @@ class PinepodsService { String? 
get server => _server; // Check if episode exists in database - Future checkEpisodeInDb(int userId, String episodeTitle, String episodeUrl) async { + Future checkEpisodeInDb( + int userId, + String episodeTitle, + String episodeUrl, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/check_episode_in_db'); - + try { final requestBody = jsonEncode({ 'user_id': userId, @@ -253,10 +250,7 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); @@ -272,20 +266,22 @@ class PinepodsService { } // Get episode ID from title and URL - Future getEpisodeId(int userId, String episodeTitle, String episodeUrl, bool isYoutube) async { + Future getEpisodeId( + int userId, + String episodeTitle, + String episodeUrl, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_episode_id_ep_name?user_id=$userId&episode_url=${Uri.encodeComponent(episodeUrl)}&episode_title=${Uri.encodeComponent(episodeTitle)}&is_youtube=$isYoutube'); - + final url = Uri.parse( + '$_server/api/data/get_episode_id_ep_name?user_id=$userId&episode_url=${Uri.encodeComponent(episodeUrl)}&episode_title=${Uri.encodeComponent(episodeTitle)}&is_youtube=$isYoutube', + ); + try { - final response = await http.get( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { // Parse the response as a plain integer @@ -300,14 +296,19 @@ class PinepodsService { } // Add episode to history - Future addHistory(int episodeId, double episodePos, int userId, bool isYoutube) async { + Future addHistory( + int episodeId, + double episodePos, + int userId, + bool 
isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/record_podcast_history'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -318,14 +319,11 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Add history response: ${response.statusCode} - ${response.body}'); + // History API response received return response.statusCode == 200; } catch (e) { print('Error adding history: $e'); @@ -341,7 +339,7 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/queue_pod'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -351,14 +349,11 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Queue pod response: ${response.statusCode} - ${response.body}'); + // Queue API response received return response.statusCode == 200; } catch (e) { print('Error queueing episode: $e'); @@ -374,16 +369,13 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/increment_played/$userId'); print('Making API call to: $url'); - + try { - final response = await http.put( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.put(url, headers: {'Api-Key': _apiKey!}); - print('Increment played response: ${response.statusCode} - ${response.body}'); + print( + 'Increment played response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error incrementing played: $e'); @@ -392,20 +384,23 @@ class PinepodsService { 
} // Get podcast ID from episode - Future getPodcastIdFromEpisode(int episodeId, int userId, bool isYoutube) async { + Future getPodcastIdFromEpisode( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_podcast_id_from_ep/$episodeId'); - + final url = Uri.parse( + '$_server/api/data/get_podcast_id_from_ep/$episodeId', + ); + try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { @@ -420,14 +415,18 @@ class PinepodsService { } // Get play episode details (playback speed, skip times) - Future getPlayEpisodeDetails(int userId, int podcastId, bool isYoutube) async { + Future getPlayEpisodeDetails( + int userId, + int podcastId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/get_play_episode_details'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'user_id': userId, @@ -437,14 +436,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Play episode details response: ${response.statusCode} - ${response.body}'); + print( + 'Play episode details response: ${response.statusCode} - ${response.body}', + ); if (response.statusCode == 200) { final data = jsonDecode(response.body); return PlayEpisodeDetails( @@ -461,14 +459,19 @@ class PinepodsService { } // Record listen duration for episode - Future recordListenDuration(int episodeId, int userId, double listenDuration, bool isYoutube) async { + Future recordListenDuration( + int episodeId, + 
int userId, + double listenDuration, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/record_listen_duration'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -479,14 +482,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Record listen duration response: ${response.statusCode} - ${response.body}'); + print( + 'Record listen duration response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error recording listen duration: $e'); @@ -502,16 +504,13 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/increment_listen_time/$userId'); print('Making API call to: $url'); - + try { - final response = await http.put( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.put(url, headers: {'Api-Key': _apiKey!}); - print('Increment listen time response: ${response.statusCode} - ${response.body}'); + print( + 'Increment listen time response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error incrementing listen time: $e'); @@ -527,7 +526,7 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/save_episode'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -537,14 +536,11 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Save episode response: ${response.statusCode} - ${response.body}'); + // Save 
episode API response received return response.statusCode == 200; } catch (e) { print('Error saving episode: $e'); @@ -553,14 +549,18 @@ class PinepodsService { } // Remove saved episode - Future removeSavedEpisode(int episodeId, int userId, bool isYoutube) async { + Future removeSavedEpisode( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/remove_saved_episode'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -570,14 +570,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Remove saved episode response: ${response.statusCode} - ${response.body}'); + print( + 'Remove saved episode response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error removing saved episode: $e'); @@ -586,14 +585,18 @@ class PinepodsService { } // Download episode to server - Future downloadEpisode(int episodeId, int userId, bool isYoutube) async { + Future downloadEpisode( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/download_podcast'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -603,14 +606,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Download episode response: ${response.statusCode} - ${response.body}'); + print( + 'Download episode response: 
${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error downloading episode: $e'); @@ -626,7 +628,7 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/delete_episode'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -636,14 +638,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Delete episode response: ${response.statusCode} - ${response.body}'); + print( + 'Delete episode response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error deleting episode: $e'); @@ -652,14 +653,18 @@ class PinepodsService { } // Mark episode as completed - Future markEpisodeCompleted(int episodeId, int userId, bool isYoutube) async { + Future markEpisodeCompleted( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/mark_episode_completed'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -669,14 +674,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Mark completed response: ${response.statusCode} - ${response.body}'); + print( + 'Mark completed response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error marking episode completed: $e'); @@ -685,14 +689,18 @@ class PinepodsService { } // Mark episode as uncompleted - Future markEpisodeUncompleted(int episodeId, int 
userId, bool isYoutube) async { + Future markEpisodeUncompleted( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/mark_episode_uncompleted'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -702,14 +710,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Mark uncompleted response: ${response.statusCode} - ${response.body}'); + print( + 'Mark uncompleted response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error marking episode uncompleted: $e'); @@ -717,15 +724,19 @@ class PinepodsService { } } - // Remove episode from queue - Future removeQueuedEpisode(int episodeId, int userId, bool isYoutube) async { + // Remove episode from queue + Future removeQueuedEpisode( + int episodeId, + int userId, + bool isYoutube, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/remove_queued_pod'); print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -735,14 +746,13 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Remove queued response: ${response.statusCode} - ${response.body}'); + print( + 'Remove queued response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error removing queued episode: $e'); @@ -758,21 +768,16 @@ class PinepodsService { final url 
= Uri.parse('$_server/api/data/user_history/$userId'); print('Making API call to: $url'); - + try { - final response = await http.get( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); + + // User history API response received - print('User history response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); final episodesList = data['data'] as List? ?? []; - + return episodesList.map((episodeData) { return PinepodsEpisode( podcastName: episodeData['podcastname'] ?? '', @@ -806,23 +811,22 @@ class PinepodsService { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_queued_episodes?user_id=$userId'); + final url = Uri.parse( + '$_server/api/data/get_queued_episodes?user_id=$userId', + ); print('Making API call to: $url'); - + try { - final response = await http.get( - url, - headers: { - 'Api-Key': _apiKey!, - }, + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); + + print( + 'Queued episodes response: ${response.statusCode} - ${response.body}', ); - print('Queued episodes response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); final episodesList = data['data'] as List? ?? []; - + return episodesList.map((episodeData) { return PinepodsEpisode( podcastName: episodeData['podcastname'] ?? '', @@ -836,13 +840,17 @@ class PinepodsService { episodeId: episodeData['episodeid'] ?? 0, completed: episodeData['completed'] ?? false, saved: episodeData['saved'] ?? false, - queued: episodeData['queued'] ?? true, // Should always be true for queued episodes + queued: + episodeData['queued'] ?? + true, // Should always be true for queued episodes downloaded: episodeData['downloaded'] ?? false, isYoutube: episodeData['is_youtube'] ?? 
false, ); }).toList(); } else { - throw Exception('Failed to load queued episodes: ${response.statusCode}'); + throw Exception( + 'Failed to load queued episodes: ${response.statusCode}', + ); } } catch (e) { print('Error getting queued episodes: $e'); @@ -858,21 +866,16 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/saved_episode_list/$userId'); print('Making API call to: $url'); - + try { - final response = await http.get( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); + + // Saved episodes API response received - print('Saved episodes response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); final episodesList = data['saved_episodes'] as List? ?? []; - + return episodesList.map((episodeData) { return PinepodsEpisode( podcastName: episodeData['podcastname'] ?? '', @@ -885,14 +888,18 @@ class PinepodsService { listenDuration: episodeData['listenduration'] ?? 0, episodeId: episodeData['episodeid'] ?? 0, completed: episodeData['completed'] ?? false, - saved: episodeData['saved'] ?? true, // Should always be true for saved episodes + saved: + episodeData['saved'] ?? + true, // Should always be true for saved episodes queued: episodeData['queued'] ?? false, downloaded: episodeData['downloaded'] ?? false, isYoutube: episodeData['is_youtube'] ?? 
false, ); }).toList(); } else { - throw Exception('Failed to load saved episodes: ${response.statusCode}'); + throw Exception( + 'Failed to load saved episodes: ${response.statusCode}', + ); } } catch (e) { print('Error getting saved episodes: $e'); @@ -901,14 +908,18 @@ class PinepodsService { } // Get episode metadata - Future getEpisodeMetadata(int episodeId, int userId, {bool isYoutube = false, bool personEpisode = false}) async { + Future getEpisodeMetadata( + int episodeId, + int userId, { + bool isYoutube = false, + bool personEpisode = false, + }) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/get_episode_metadata'); - print('Making API call to: $url'); - + try { final requestBody = jsonEncode({ 'episode_id': episodeId, @@ -919,19 +930,14 @@ class PinepodsService { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Episode metadata response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); final episodeData = data['episode']; - + return PinepodsEpisode( podcastName: episodeData['podcastname'] ?? '', episodeTitle: episodeData['episodetitle'] ?? 
'', @@ -963,23 +969,19 @@ class PinepodsService { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/download_episode_list?user_id=$userId'); + final url = Uri.parse( + '$_server/api/data/download_episode_list?user_id=$userId', + ); print('Making API call to: $url'); - + try { - final response = await http.get( - url, - headers: { - 'Api-Key': _apiKey!, - }, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Server downloads response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); - final episodesList = data['downloaded_episodes'] as List? ?? []; - + final episodesList = + data['downloaded_episodes'] as List? ?? []; + return episodesList.map((episodeData) { return PinepodsEpisode( podcastName: episodeData['podcastname'] ?? '', @@ -994,12 +996,16 @@ class PinepodsService { completed: episodeData['completed'] ?? false, saved: episodeData['saved'] ?? false, queued: episodeData['queued'] ?? false, - downloaded: episodeData['downloaded'] ?? true, // Should always be true for downloaded episodes + downloaded: + episodeData['downloaded'] ?? + true, // Should always be true for downloaded episodes isYoutube: episodeData['is_youtube'] ?? 
false, ); }).toList(); } else { - throw Exception('Failed to load server downloads: ${response.statusCode}'); + throw Exception( + 'Failed to load server downloads: ${response.statusCode}', + ); } } catch (e) { print('Error getting server downloads: $e'); @@ -1008,7 +1014,12 @@ class PinepodsService { } // Get stream URL for episode - String getStreamUrl(int episodeId, int userId, {bool isYoutube = false, bool isLocal = false}) { + String getStreamUrl( + int episodeId, + int userId, { + bool isYoutube = false, + bool isLocal = false, + }) { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } @@ -1024,17 +1035,22 @@ class PinepodsService { } // Search for podcasts using PinePods search API - Future searchPodcasts(String query, SearchProvider provider) async { + Future searchPodcasts( + String query, + SearchProvider provider, + ) async { const searchApiUrl = 'https://search.pinepods.online'; - final url = Uri.parse('$searchApiUrl/api/search?query=${Uri.encodeComponent(query)}&index=${provider.value}'); - + final url = Uri.parse( + '$searchApiUrl/api/search?query=${Uri.encodeComponent(query)}&index=${provider.value}', + ); + try { print('Making search request to: $url'); final response = await http.get(url); - + if (response.statusCode == 200) { final data = jsonDecode(response.body); - print('Search response: ${response.body}'); + // Search API response received return PinepodsSearchResult.fromJson(data); } else { throw Exception('Failed to search podcasts: ${response.statusCode}'); @@ -1046,23 +1062,25 @@ class PinepodsService { } // Check if a podcast is already added to the server - Future checkPodcastExists(String podcastTitle, String podcastUrl, int userId) async { + Future checkPodcastExists( + String podcastTitle, + String podcastUrl, + int userId, + ) async { if (_server == null || _apiKey == null) { return false; } - final url = Uri.parse('$_server/api/data/check_podcast') - .replace(queryParameters: { - 'user_id': 
userId.toString(), - 'podcast_name': podcastTitle, - 'podcast_url': podcastUrl, - }); + final url = Uri.parse('$_server/api/data/check_podcast').replace( + queryParameters: { + 'user_id': userId.toString(), + 'podcast_name': podcastTitle, + 'podcast_url': podcastUrl, + }, + ); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); @@ -1101,10 +1119,7 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: jsonEncode(body), ); @@ -1120,7 +1135,11 @@ class PinepodsService { } // Remove a podcast from the server - Future removePodcast(String podcastTitle, String podcastUrl, int userId) async { + Future removePodcast( + String podcastTitle, + String podcastUrl, + int userId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } @@ -1135,10 +1154,7 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: jsonEncode(body), ); @@ -1167,27 +1183,28 @@ class PinepodsService { } final url = Uri.parse('$_server/api/data/get_podcast_details_dynamic') - .replace(queryParameters: { - 'user_id': userId.toString(), - 'podcast_title': podcastTitle, - 'podcast_url': podcastUrl, - 'podcast_index_id': podcastIndexId.toString(), - 'added': added.toString(), - 'display_only': displayOnly.toString(), - }); + .replace( + queryParameters: { + 'user_id': userId.toString(), + 'podcast_title': podcastTitle, + 'podcast_url': podcastUrl, + 'podcast_index_id': podcastIndexId.toString(), + 'added': added.toString(), + 'display_only': 
displayOnly.toString(), + }, + ); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); - print('Podcast details response: ${response.body}'); + // Podcast details API response received return PodcastDetailsData.fromJson(data); } else { - throw Exception('Failed to get podcast details: ${response.statusCode}'); + throw Exception( + 'Failed to get podcast details: ${response.statusCode}', + ); } } catch (e) { print('Error getting podcast details: $e'); @@ -1196,30 +1213,33 @@ class PinepodsService { } // Get podcast details by podcast ID (for subscribed podcasts) - Future?> getPodcastDetailsById(int podcastId, int userId) async { + Future?> getPodcastDetailsById( + int podcastId, + int userId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_podcast_details') - .replace(queryParameters: { - 'podcast_id': podcastId.toString(), - 'user_id': userId.toString(), - }); + final url = Uri.parse('$_server/api/data/get_podcast_details').replace( + queryParameters: { + 'podcast_id': podcastId.toString(), + 'user_id': userId.toString(), + }, + ); try { print('Getting podcast details by ID from: $url'); - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); - print('Podcast details by ID response: ${response.body}'); + // Podcast details by ID API response received return data['details']; } else { - throw Exception('Failed to get podcast details: ${response.statusCode}'); + throw Exception( + 'Failed to get podcast details: ${response.statusCode}', + ); } } catch (e) { print('Error getting podcast details by ID: $e'); @@ -1228,33 +1248,33 @@ 
class PinepodsService { } // Get podcast ID by feed URL and title - Future getPodcastId(int userId, String podcastFeed, String podcastTitle) async { + Future getPodcastId( + int userId, + String podcastFeed, + String podcastTitle, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_podcast_id') - .replace(queryParameters: { - 'user_id': userId.toString(), - 'podcast_feed': podcastFeed, - 'podcast_title': podcastTitle, - }); + final url = Uri.parse('$_server/api/data/get_podcast_id').replace( + queryParameters: { + 'user_id': userId.toString(), + 'podcast_feed': podcastFeed, + 'podcast_title': podcastTitle, + }, + ); try { print('Getting podcast ID from: $url'); - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); - print('Podcast ID response: ${response.body}'); - final episodes = data['episodes']; - if (episodes is int) { - return episodes; - } else if (episodes is List && episodes.isNotEmpty) { - return episodes.first as int?; + // Podcast ID API response received + final podcastId = data['podcast_id']; + if (podcastId is int) { + return podcastId; } return null; } else { @@ -1267,39 +1287,43 @@ class PinepodsService { } // Get episodes for an added podcast - Future> getPodcastEpisodes(int userId, int podcastId) async { + Future> getPodcastEpisodes( + int userId, + int podcastId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/podcast_episodes') - .replace(queryParameters: { - 'user_id': userId.toString(), - 'podcast_id': podcastId.toString(), - }); + final url = Uri.parse('$_server/api/data/podcast_episodes').replace( + queryParameters: { + 'user_id': userId.toString(), + 'podcast_id': podcastId.toString(), + }, + ); 
try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); if (response.statusCode == 200) { final data = jsonDecode(response.body); final episodes = data['episodes'] as List; return episodes.map((episodeData) { - print('Episode data: $episodeData'); - // Add default values for fields not provided by this endpoint + // Add default values only for fields not provided by this endpoint final episodeWithDefaults = Map.from(episodeData); - episodeWithDefaults['saved'] = false; // Episodes from this endpoint don't have saved status - episodeWithDefaults['queued'] = false; // Episodes from this endpoint don't have queued status - episodeWithDefaults['downloaded'] = false; // Episodes from this endpoint don't have downloaded status - episodeWithDefaults['is_youtube'] = false; + // Only add defaults if these fields are not present in the API response + episodeWithDefaults['saved'] ??= false; + episodeWithDefaults['queued'] ??= false; + episodeWithDefaults['downloaded'] ??= false; + episodeWithDefaults['is_youtube'] ??= false; + return PinepodsEpisode.fromJson(episodeWithDefaults); }).toList(); } else { - throw Exception('Failed to get podcast episodes: ${response.statusCode}'); + throw Exception( + 'Failed to get podcast episodes: ${response.statusCode}', + ); } } catch (e) { print('Error getting podcast episodes: $e'); @@ -1313,17 +1337,14 @@ class PinepodsService { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_stats').replace(queryParameters: { - 'user_id': userId.toString(), - }); + final url = Uri.parse( + '$_server/api/data/get_stats', + ).replace(queryParameters: {'user_id': userId.toString()}); try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { @@ 
-1349,17 +1370,16 @@ class PinepodsService { try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { final data = jsonDecode(response.body); return data['data'] ?? 'Unknown'; } else { - throw Exception('Failed to get PinePods version: ${response.statusCode}'); + throw Exception( + 'Failed to get PinePods version: ${response.statusCode}', + ); } } catch (e) { print('Error getting PinePods version: $e'); @@ -1378,10 +1398,7 @@ class PinepodsService { try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { @@ -1396,7 +1413,7 @@ class PinepodsService { } } - // Get user ID from API key + // Get user ID from API key Future getUserIdFromApiKey() async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); @@ -1407,10 +1424,7 @@ class PinepodsService { try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); if (response.statusCode == 200) { @@ -1426,25 +1440,20 @@ class PinepodsService { } // Get home overview data - Future getHomeOverview(int userId) async { + Future getHomeOverview(int userId) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/home_overview?user_id=$userId'); print('Making API call to: $url'); - + try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); - print('Home overview response: ${response.statusCode} - ${response.body}'); - 
if (response.statusCode == 200) { final data = jsonDecode(response.body); return HomeOverview.fromJson(data); @@ -1453,30 +1462,27 @@ class PinepodsService { } } catch (e) { print('Error getting home overview: $e'); - return null; + rethrow; } } // Get playlists - Future getPlaylists(int userId) async { + Future getPlaylists(int userId) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/get_playlists?user_id=$userId'); print('Making API call to: $url'); - + try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); - print('Playlists response: ${response.statusCode} - ${response.body}'); - + // Playlists API response received + if (response.statusCode == 200) { final data = jsonDecode(response.body); return PlaylistResponse.fromJson(data); @@ -1485,7 +1491,7 @@ class PinepodsService { } } catch (e) { print('Error getting playlists: $e'); - return null; + rethrow; } } @@ -1497,18 +1503,15 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/get_theme/$userId'); print('Making API call to: $url'); - + try { final response = await http.get( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, ); - print('Get theme response: ${response.statusCode} - ${response.body}'); - + // Theme API response received + if (response.statusCode == 200) { final data = jsonDecode(response.body); return data['theme'] as String?; @@ -1529,24 +1532,18 @@ class PinepodsService { final url = Uri.parse('$_server/api/data/user/set_theme'); print('Making API call to: $url'); - + try { - final requestBody = jsonEncode({ - 'user_id': userId, - 'new_theme': theme, - }); + final requestBody = jsonEncode({'user_id': userId, 'new_theme': theme}); final response = await 
http.put( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: requestBody, ); - print('Set theme response: ${response.statusCode} - ${response.body}'); - + // Set theme API response received + if (response.statusCode == 200) { final data = jsonDecode(response.body); return data['message'] != null; @@ -1569,22 +1566,21 @@ class PinepodsService { print('Making API call to: $url'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Get playlists response: ${response.statusCode} - ${response.body}'); + print( + 'Get playlists response: ${response.statusCode} - ${response.body}', + ); if (response.statusCode == 200) { final data = jsonDecode(response.body); final List playlistsData = data['playlists'] ?? []; - + List playlists = []; for (var playlistData in playlistsData) { playlists.add(PlaylistData.fromJson(playlistData)); } - + return playlists; } else { throw Exception('Failed to get playlists: ${response.statusCode}'); @@ -1607,14 +1603,13 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, body: jsonEncode(request.toJson()), ); - print('Create playlist response: ${response.statusCode} - ${response.body}'); + print( + 'Create playlist response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error creating playlist: $e'); @@ -1634,17 +1629,13 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, - body: jsonEncode({ - 'user_id': userId, - 'playlist_id': playlistId, - }), + headers: {'Api-Key': _apiKey!, 'Content-Type': 
'application/json'}, + body: jsonEncode({'user_id': userId, 'playlist_id': playlistId}), ); - print('Delete playlist response: ${response.statusCode} - ${response.body}'); + print( + 'Delete playlist response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error deleting playlist: $e'); @@ -1653,27 +1644,33 @@ class PinepodsService { } // Get playlist episodes - Future getPlaylistEpisodes(int userId, int playlistId) async { + Future getPlaylistEpisodes( + int userId, + int playlistId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/get_playlist_episodes?user_id=$userId&playlist_id=$playlistId'); + final url = Uri.parse( + '$_server/api/data/get_playlist_episodes?user_id=$userId&playlist_id=$playlistId', + ); print('Making API call to: $url'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Get playlist episodes response: ${response.statusCode} - ${response.body}'); + print( + 'Get playlist episodes response: ${response.statusCode} - ${response.body}', + ); if (response.statusCode == 200) { final data = jsonDecode(response.body); return PlaylistEpisodesResponse.fromJson(data); } else { - throw Exception('Failed to get playlist episodes: ${response.statusCode}'); + throw Exception( + 'Failed to get playlist episodes: ${response.statusCode}', + ); } } catch (e) { print('Error getting playlist episodes: $e'); @@ -1693,16 +1690,13 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, - body: jsonEncode({ - 'episode_ids': episodeIds, - }), + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, + body: jsonEncode({'episode_ids': episodeIds}), ); - print('Reorder queue response: ${response.statusCode} 
- ${response.body}'); + print( + 'Reorder queue response: ${response.statusCode} - ${response.body}', + ); return response.statusCode == 200; } catch (e) { print('Error reordering queue: $e'); @@ -1711,7 +1705,10 @@ class PinepodsService { } // Search episodes in user's subscriptions - Future> searchEpisodes(int userId, String searchTerm) async { + Future> searchEpisodes( + int userId, + String searchTerm, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } @@ -1722,27 +1719,19 @@ class PinepodsService { try { final response = await http.post( url, - headers: { - 'Api-Key': _apiKey!, - 'Content-Type': 'application/json', - }, - body: jsonEncode({ - 'search_term': searchTerm, - 'user_id': userId, - }), + headers: {'Api-Key': _apiKey!, 'Content-Type': 'application/json'}, + body: jsonEncode({'search_term': searchTerm, 'user_id': userId}), ); - print('Search episodes response: ${response.statusCode} - ${response.body}'); - if (response.statusCode == 200) { final data = jsonDecode(response.body); final List episodesData = data['data'] ?? 
[]; - + List episodes = []; for (var episodeData in episodesData) { episodes.add(SearchEpisodeResult.fromJson(episodeData)); } - + return episodes; } else { throw Exception('Failed to search episodes: ${response.statusCode}'); @@ -1754,26 +1743,29 @@ class PinepodsService { } // Fetch podcast 2.0 data for a specific episode - Future?> fetchPodcasting2Data(int episodeId, int userId) async { + Future?> fetchPodcasting2Data( + int episodeId, + int userId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } - final url = Uri.parse('$_server/api/data/fetch_podcasting_2_data') - .replace(queryParameters: { - 'episode_id': episodeId.toString(), - 'user_id': userId.toString(), - }); + final url = Uri.parse('$_server/api/data/fetch_podcasting_2_data').replace( + queryParameters: { + 'episode_id': episodeId.toString(), + 'user_id': userId.toString(), + }, + ); print('Making API call to: $url'); try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Podcast 2.0 data response: ${response.statusCode} - ${response.body}'); + print( + 'Podcast 2.0 data response: ${response.statusCode} - ${response.body}', + ); if (response.statusCode == 200) { final data = jsonDecode(response.body); @@ -1789,26 +1781,30 @@ class PinepodsService { } // Fetch podcast 2.0 data for a specific podcast - Future?> fetchPodcasting2PodData(int podcastId, int userId) async { + Future?> fetchPodcasting2PodData( + int podcastId, + int userId, + ) async { if (_server == null || _apiKey == null) { throw Exception('Not authenticated'); } final url = Uri.parse('$_server/api/data/fetch_podcasting_2_pod_data') - .replace(queryParameters: { - 'podcast_id': podcastId.toString(), - 'user_id': userId.toString(), - }); + .replace( + queryParameters: { + 'podcast_id': podcastId.toString(), + 'user_id': userId.toString(), + }, + ); print('Making API call to: $url'); 
try { - final response = await http.get( - url, - headers: {'Api-Key': _apiKey!}, - ); + final response = await http.get(url, headers: {'Api-Key': _apiKey!}); - print('Podcast 2.0 pod data response: ${response.statusCode} - ${response.body}'); + print( + 'Podcast 2.0 pod data response: ${response.statusCode} - ${response.body}', + ); if (response.statusCode == 200) { final data = jsonDecode(response.body); @@ -1934,8 +1930,8 @@ class PlaylistData { name: json['name'] ?? '', description: json['description'], isSystemPlaylist: json['is_system_playlist'] ?? false, - podcastIds: json['podcast_ids'] != null - ? List.from(json['podcast_ids']) + podcastIds: json['podcast_ids'] != null + ? List.from(json['podcast_ids']) : null, includeUnplayed: json['include_unplayed'] ?? true, includePartiallyPlayed: json['include_partially_played'] ?? true, @@ -2113,7 +2109,7 @@ class SearchEpisodeResult { podcastName: json['podcastname'] ?? '', artworkUrl: json['artworkurl'] ?? '', author: json['author'] ?? '', - categories: json['categories'] ?? '', + categories: _parseCategories(json['categories']), description: json['description'] ?? '', episodeCount: json['episodecount'], feedUrl: json['feedurl'] ?? 
'', @@ -2155,4 +2151,20 @@ class SearchEpisodeResult { isYoutube: isYoutube, ); } -} \ No newline at end of file + + /// Parse categories from either string or Map format + static String _parseCategories(dynamic categories) { + if (categories == null) return ''; + + if (categories is String) { + // Old format - return as is + return categories; + } else if (categories is Map) { + // New format - convert map values to comma-separated string + if (categories.isEmpty) return ''; + return categories.values.join(', '); + } + + return ''; + } +} diff --git a/mobile/lib/services/search_history_service.dart b/mobile/lib/services/search_history_service.dart new file mode 100644 index 00000000..9f2a012d --- /dev/null +++ b/mobile/lib/services/search_history_service.dart @@ -0,0 +1,162 @@ +// lib/services/search_history_service.dart + +import 'dart:convert'; +import 'package:logging/logging.dart'; +import 'package:shared_preferences/shared_preferences.dart'; + +/// Service for managing search history for different search types +/// Stores search terms separately for episode search and podcast search +class SearchHistoryService { + final log = Logger('SearchHistoryService'); + + static const int maxHistoryItems = 30; + static const String episodeSearchKey = 'episode_search_history'; + static const String podcastSearchKey = 'podcast_search_history'; + + SearchHistoryService(); + + /// Adds a search term to episode search history + /// Moves existing term to top if already exists, otherwise adds as new + Future addEpisodeSearchTerm(String searchTerm) async { + print('SearchHistoryService.addEpisodeSearchTerm called with: "$searchTerm"'); + await _addSearchTerm(episodeSearchKey, searchTerm); + } + + /// Adds a search term to podcast search history + /// Moves existing term to top if already exists, otherwise adds as new + Future addPodcastSearchTerm(String searchTerm) async { + print('SearchHistoryService.addPodcastSearchTerm called with: "$searchTerm"'); + await 
_addSearchTerm(podcastSearchKey, searchTerm); + } + + /// Gets episode search history, most recent first + Future> getEpisodeSearchHistory() async { + print('SearchHistoryService.getEpisodeSearchHistory called'); + return await _getSearchHistory(episodeSearchKey); + } + + /// Gets podcast search history, most recent first + Future> getPodcastSearchHistory() async { + print('SearchHistoryService.getPodcastSearchHistory called'); + return await _getSearchHistory(podcastSearchKey); + } + + /// Clears episode search history + Future clearEpisodeSearchHistory() async { + await _clearSearchHistory(episodeSearchKey); + } + + /// Clears podcast search history + Future clearPodcastSearchHistory() async { + await _clearSearchHistory(podcastSearchKey); + } + + /// Removes a specific term from episode search history + Future removeEpisodeSearchTerm(String searchTerm) async { + await _removeSearchTerm(episodeSearchKey, searchTerm); + } + + /// Removes a specific term from podcast search history + Future removePodcastSearchTerm(String searchTerm) async { + await _removeSearchTerm(podcastSearchKey, searchTerm); + } + + /// Internal method to add a search term to specified history type + Future _addSearchTerm(String historyKey, String searchTerm) async { + if (searchTerm.trim().isEmpty) return; + + final trimmedTerm = searchTerm.trim(); + print('SearchHistoryService: Adding search term "$trimmedTerm" to $historyKey'); + + try { + final prefs = await SharedPreferences.getInstance(); + + // Get existing history + final historyJson = prefs.getString(historyKey); + List history = []; + + if (historyJson != null) { + final List decodedList = jsonDecode(historyJson); + history = decodedList.cast(); + } + + print('SearchHistoryService: Existing data for $historyKey: $history'); + + // Remove if already exists (to avoid duplicates) + history.remove(trimmedTerm); + + // Add to beginning (most recent first) + history.insert(0, trimmedTerm); + + // Limit to max items + if (history.length > 
maxHistoryItems) { + history = history.take(maxHistoryItems).toList(); + } + + // Save updated history + await prefs.setString(historyKey, jsonEncode(history)); + + print('SearchHistoryService: Updated $historyKey with ${history.length} terms: $history'); + } catch (e) { + print('SearchHistoryService: Failed to add search term to $historyKey: $e'); + log.warning('Failed to add search term to $historyKey: $e'); + } + } + + /// Internal method to get search history for specified type + Future> _getSearchHistory(String historyKey) async { + try { + final prefs = await SharedPreferences.getInstance(); + final historyJson = prefs.getString(historyKey); + + print('SearchHistoryService: Getting history for $historyKey: $historyJson'); + + if (historyJson != null) { + final List decodedList = jsonDecode(historyJson); + final history = decodedList.cast(); + print('SearchHistoryService: Returning history for $historyKey: $history'); + return history; + } + } catch (e) { + print('SearchHistoryService: Failed to get search history for $historyKey: $e'); + } + + print('SearchHistoryService: Returning empty history for $historyKey'); + return []; + } + + /// Internal method to clear search history for specified type + Future _clearSearchHistory(String historyKey) async { + try { + final prefs = await SharedPreferences.getInstance(); + await prefs.remove(historyKey); + print('SearchHistoryService: Cleared search history for $historyKey'); + } catch (e) { + print('SearchHistoryService: Failed to clear search history for $historyKey: $e'); + } + } + + /// Internal method to remove specific term from history + Future _removeSearchTerm(String historyKey, String searchTerm) async { + try { + final prefs = await SharedPreferences.getInstance(); + final historyJson = prefs.getString(historyKey); + + if (historyJson == null) return; + + final List decodedList = jsonDecode(historyJson); + List history = decodedList.cast(); + history.remove(searchTerm); + + if (history.isEmpty) { + await 
prefs.remove(historyKey); + } else { + await prefs.setString(historyKey, jsonEncode(history)); + } + + print('SearchHistoryService: Removed "$searchTerm" from $historyKey'); + } catch (e) { + print('SearchHistoryService: Failed to remove search term from $historyKey: $e'); + } + } +} \ No newline at end of file diff --git a/mobile/lib/ui/auth/oidc_browser.dart b/mobile/lib/ui/auth/oidc_browser.dart new file mode 100644 index 00000000..a00d7dde --- /dev/null +++ b/mobile/lib/ui/auth/oidc_browser.dart @@ -0,0 +1,147 @@ +import 'package:flutter/material.dart'; +import 'package:webview_flutter/webview_flutter.dart'; +import 'package:pinepods_mobile/services/pinepods/oidc_service.dart'; + +class OidcBrowser extends StatefulWidget { + final String authUrl; + final String serverUrl; + final Function(String apiKey) onSuccess; + final Function(String error) onError; + + const OidcBrowser({ + super.key, + required this.authUrl, + required this.serverUrl, + required this.onSuccess, + required this.onError, + }); + + @override + State createState() => _OidcBrowserState(); +} + +class _OidcBrowserState extends State { + late final WebViewController _controller; + bool _isLoading = true; + String _currentUrl = ''; + bool _callbackTriggered = false; // Prevent duplicate callbacks + + @override + void initState() { + super.initState(); + _initializeWebView(); + } + + void _initializeWebView() { + _controller = WebViewController() + ..setJavaScriptMode(JavaScriptMode.unrestricted) + ..setNavigationDelegate( + NavigationDelegate( + onPageStarted: (String url) { + setState(() { + _currentUrl = url; + _isLoading = true; + }); + + _checkForCallback(url); + }, + onPageFinished: (String url) { + setState(() { + _isLoading = false; + }); + + _checkForCallback(url); + }, + onNavigationRequest: (NavigationRequest request) { + _checkForCallback(request.url); + return NavigationDecision.navigate; + }, + ), + ) + ..loadRequest(Uri.parse(widget.authUrl)); + } + + void _checkForCallback(String 
url) { + if (_callbackTriggered) return; // Prevent duplicate callbacks + + // Check if we've reached the callback URL with an API key + final apiKey = OidcService.extractApiKeyFromUrl(url); + if (apiKey != null) { + _callbackTriggered = true; // Mark callback as triggered + widget.onSuccess(apiKey); + return; + } + + // Check for error in callback URL + final uri = Uri.tryParse(url); + if (uri != null && uri.path.contains('/oauth/callback')) { + final error = uri.queryParameters['error']; + if (error != null) { + _callbackTriggered = true; // Mark callback as triggered + final errorDescription = uri.queryParameters['description'] ?? uri.queryParameters['details'] ?? 'Authentication failed'; + widget.onError('$error: $errorDescription'); + return; + } + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Sign In'), + backgroundColor: Theme.of(context).primaryColor, + foregroundColor: Colors.white, + leading: IconButton( + icon: const Icon(Icons.close), + onPressed: () { + widget.onError('User cancelled authentication'); + }, + ), + actions: [ + if (_isLoading) + const Padding( + padding: EdgeInsets.all(16.0), + child: SizedBox( + width: 20, + height: 20, + child: CircularProgressIndicator( + strokeWidth: 2, + valueColor: AlwaysStoppedAnimation(Colors.white), + ), + ), + ), + ], + ), + body: Column( + children: [ + // URL bar for debugging + if (MediaQuery.of(context).size.height > 600) + Container( + padding: const EdgeInsets.all(8.0), + color: Colors.grey[200], + child: Row( + children: [ + const Icon(Icons.link, size: 16), + const SizedBox(width: 8), + Expanded( + child: Text( + _currentUrl, + style: const TextStyle(fontSize: 12), + overflow: TextOverflow.ellipsis, + ), + ), + ], + ), + ), + // WebView + Expanded( + child: WebViewWidget( + controller: _controller, + ), + ), + ], + ), + ); + } +} \ No newline at end of file diff --git a/mobile/lib/ui/auth/pinepods_startup_login.dart 
b/mobile/lib/ui/auth/pinepods_startup_login.dart index bb9735f3..eb6083ce 100644 --- a/mobile/lib/ui/auth/pinepods_startup_login.dart +++ b/mobile/lib/ui/auth/pinepods_startup_login.dart @@ -2,8 +2,13 @@ import 'package:flutter/material.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/login_service.dart'; +import 'package:pinepods_mobile/services/pinepods/oidc_service.dart'; +import 'package:pinepods_mobile/services/auth_notifier.dart'; +import 'package:pinepods_mobile/ui/auth/oidc_browser.dart'; import 'package:provider/provider.dart'; +import 'package:url_launcher/url_launcher.dart'; import 'dart:math'; +import 'dart:async'; class PinepodsStartupLogin extends StatefulWidget { final VoidCallback? onLoginSuccess; @@ -26,22 +31,27 @@ class _PinepodsStartupLoginState extends State { bool _isLoading = false; bool _showMfaField = false; + bool _isLoadingOidc = false; String _errorMessage = ''; String? _tempServerUrl; - String? _tempApiKey; + String? _tempUsername; int? _tempUserId; + String? _tempMfaSessionToken; + List _oidcProviders = []; + bool _hasCheckedOidc = false; + Timer? 
_oidcCheckTimer; // List of background images - you can add your own images to assets/images/ final List _backgroundImages = [ - 'assets/images/1.jpg', - 'assets/images/2.jpg', - 'assets/images/3.jpg', - 'assets/images/4.jpg', - 'assets/images/5.jpg', - 'assets/images/6.jpg', - 'assets/images/7.jpg', - 'assets/images/8.jpg', - 'assets/images/9.jpg', + 'assets/images/1.webp', + 'assets/images/2.webp', + 'assets/images/3.webp', + 'assets/images/4.webp', + 'assets/images/5.webp', + 'assets/images/6.webp', + 'assets/images/7.webp', + 'assets/images/8.webp', + 'assets/images/9.webp', ]; late String _selectedBackground; @@ -52,10 +62,100 @@ class _PinepodsStartupLoginState extends State { // Select a random background image final random = Random(); _selectedBackground = _backgroundImages[random.nextInt(_backgroundImages.length)]; + + // Listen for server URL changes to check OIDC providers + _serverController.addListener(_onServerUrlChanged); + + // Register global login success callback + AuthNotifier.setGlobalLoginSuccessCallback(_handleLoginSuccess); } - Future _connectToPinepods() async { - if (!_formKey.currentState!.validate()) { + void _onServerUrlChanged() { + final serverUrl = _serverController.text.trim(); + + // Cancel any existing timer + _oidcCheckTimer?.cancel(); + + // Reset OIDC state + setState(() { + _oidcProviders.clear(); + _hasCheckedOidc = false; + _isLoadingOidc = false; + }); + + // Only check if URL looks complete and valid + if (serverUrl.isNotEmpty && + (serverUrl.startsWith('http://') || serverUrl.startsWith('https://')) && + _isValidUrl(serverUrl)) { + + // Debounce the API call - wait 1 second after user stops typing + _oidcCheckTimer = Timer(const Duration(seconds: 1), () { + _checkOidcProviders(serverUrl); + }); + } + } + + bool _isValidUrl(String url) { + try { + final uri = Uri.parse(url); + // Check if it has a proper host (not just protocol) + return uri.hasScheme && + uri.host.isNotEmpty && + uri.host.contains('.') && // Must have at 
least one dot for domain + uri.host.length > 3; // Minimum reasonable length + } catch (e) { + return false; + } + } + + Future _checkOidcProviders(String serverUrl) async { + // Allow rechecking if server URL changed + final currentUrl = _serverController.text.trim(); + if (currentUrl != serverUrl) return; // URL changed while we were waiting + + setState(() { + _isLoadingOidc = true; + }); + + try { + final providers = await OidcService.getPublicProviders(serverUrl); + // Double-check the URL hasn't changed during the API call + if (mounted && _serverController.text.trim() == serverUrl) { + setState(() { + _oidcProviders = providers; + _hasCheckedOidc = true; + _isLoadingOidc = false; + }); + } + } catch (e) { + // Only update state if URL hasn't changed + if (mounted && _serverController.text.trim() == serverUrl) { + setState(() { + _oidcProviders.clear(); + _hasCheckedOidc = true; + _isLoadingOidc = false; + }); + } + } + } + + // Manual retry when user focuses on other fields (like username) + void _retryOidcCheck() { + final serverUrl = _serverController.text.trim(); + if (serverUrl.isNotEmpty && + _isValidUrl(serverUrl) && + !_hasCheckedOidc && + !_isLoadingOidc) { + _checkOidcProviders(serverUrl); + } + } + + Future _handleOidcLogin(OidcProvider provider) async { + final serverUrl = _serverController.text.trim(); + if (serverUrl.isEmpty) { + setState(() { + _errorMessage = 'Please enter a server URL first'; + }); return; } @@ -65,51 +165,218 @@ class _PinepodsStartupLoginState extends State { }); try { - final serverUrl = _serverController.text.trim(); - final username = _usernameController.text.trim(); - final password = _passwordController.text; - final mfaCode = _showMfaField ? 
_mfaController.text.trim() : null; - - final result = await PinepodsLoginService.login( - serverUrl, - username, - password, - mfaCode: mfaCode, + // Generate PKCE and state parameters for security + final pkce = OidcService.generatePkce(); + final state = OidcService.generateState(); + + // Build authorization URL for in-app browser + final authUrl = await OidcService.buildOidcLoginUrl( + provider: provider, + serverUrl: serverUrl, + state: state, + pkce: pkce, ); - if (result.isSuccess) { - // Save the connection details including user ID - final settingsBloc = Provider.of(context, listen: false); - settingsBloc.setPinepodsServer(result.serverUrl!); - settingsBloc.setPinepodsApiKey(result.apiKey!); - settingsBloc.setPinepodsUserId(result.userId!); + if (authUrl == null) { + setState(() { + _errorMessage = 'Failed to prepare OIDC authentication URL'; + _isLoading = false; + }); + return; + } + + setState(() { + _isLoading = false; + }); - // Fetch theme from server after successful login + // Launch in-app browser + if (mounted) { + await Navigator.of(context).push( + MaterialPageRoute( + builder: (context) => OidcBrowser( + authUrl: authUrl, + serverUrl: serverUrl, + onSuccess: (apiKey) async { + Navigator.of(context).pop(); // Close the browser + await _completeOidcLogin(apiKey, serverUrl); + }, + onError: (error) { + Navigator.of(context).pop(); // Close the browser + setState(() { + _errorMessage = 'Authentication failed: $error'; + }); + }, + ), + ), + ); + } + + } catch (e) { + setState(() { + _errorMessage = 'OIDC login error: ${e.toString()}'; + _isLoading = false; + }); + } + } + + Future _completeOidcLogin(String apiKey, String serverUrl) async { + if (!mounted) return; + + setState(() { + _isLoading = true; + _errorMessage = ''; + }); + + try { + // Verify API key + final isValidKey = await PinepodsLoginService.verifyApiKey(serverUrl, apiKey); + if (!isValidKey) { + throw Exception('API key verification failed'); + } + + // Get user ID + final userId = 
await PinepodsLoginService.getUserId(serverUrl, apiKey); + if (userId == null) { + throw Exception('Failed to get user ID'); + } + + // Get user details + final userDetails = await PinepodsLoginService.getUserDetails(serverUrl, apiKey, userId); + if (userDetails == null) { + throw Exception('Failed to get user details'); + } + + // Store credentials + final settingsBloc = Provider.of(context, listen: false); + settingsBloc.setPinepodsServer(serverUrl); + settingsBloc.setPinepodsApiKey(apiKey); + settingsBloc.setPinepodsUserId(userId); + + // Set additional user details if available + if (userDetails.username != null) { + settingsBloc.setPinepodsUsername(userDetails.username!); + } + if (userDetails.email != null) { + settingsBloc.setPinepodsEmail(userDetails.email!); + } + + // Fetch theme from server + try { await settingsBloc.fetchThemeFromServer(); + } catch (e) { + // Theme fetch failure is non-critical + } + + // Notify login success + AuthNotifier.notifyLoginSuccess(); + // Call the callback if provided + if (widget.onLoginSuccess != null) { + widget.onLoginSuccess!(); + } + + } catch (e) { + if (mounted) { setState(() { + _errorMessage = 'Failed to complete login: ${e.toString()}'; _isLoading = false; }); + } + } + } - // Call success callback - if (widget.onLoginSuccess != null) { - widget.onLoginSuccess!(); + Future _connectToPinepods() async { + if (!_formKey.currentState!.validate()) { + return; + } + + setState(() { + _isLoading = true; + _errorMessage = ''; + }); + + try { + if (_showMfaField && _tempMfaSessionToken != null) { + // Complete MFA login flow + final mfaCode = _mfaController.text.trim(); + final result = await PinepodsLoginService.completeMfaLogin( + serverUrl: _tempServerUrl!, + username: _tempUsername!, + mfaSessionToken: _tempMfaSessionToken!, + mfaCode: mfaCode, + ); + + if (result.isSuccess) { + // Save the connection details including user ID + final settingsBloc = Provider.of(context, listen: false); + 
settingsBloc.setPinepodsServer(result.serverUrl!); + settingsBloc.setPinepodsApiKey(result.apiKey!); + settingsBloc.setPinepodsUserId(result.userId!); + + // Fetch theme from server after successful login + await settingsBloc.fetchThemeFromServer(); + + setState(() { + _isLoading = false; + }); + + // Call success callback + if (widget.onLoginSuccess != null) { + widget.onLoginSuccess!(); + } + } else { + setState(() { + _errorMessage = result.errorMessage ?? 'MFA verification failed'; + _isLoading = false; + }); } - } else if (result.requiresMfa) { - // Store temporary credentials and show MFA field - setState(() { - _tempServerUrl = result.serverUrl; - _tempApiKey = result.apiKey; - _tempUserId = result.userId; - _showMfaField = true; - _isLoading = false; - _errorMessage = 'Please enter your MFA code'; - }); } else { - setState(() { - _errorMessage = result.errorMessage ?? 'Login failed'; - _isLoading = false; - }); + // Initial login flow + final serverUrl = _serverController.text.trim(); + final username = _usernameController.text.trim(); + final password = _passwordController.text; + + final result = await PinepodsLoginService.login( + serverUrl, + username, + password, + ); + + if (result.isSuccess) { + // Save the connection details including user ID + final settingsBloc = Provider.of(context, listen: false); + settingsBloc.setPinepodsServer(result.serverUrl!); + settingsBloc.setPinepodsApiKey(result.apiKey!); + settingsBloc.setPinepodsUserId(result.userId!); + + // Fetch theme from server after successful login + await settingsBloc.fetchThemeFromServer(); + + setState(() { + _isLoading = false; + }); + + // Call success callback + if (widget.onLoginSuccess != null) { + widget.onLoginSuccess!(); + } + } else if (result.requiresMfa) { + // Store MFA session info and show MFA field + setState(() { + _tempServerUrl = result.serverUrl; + _tempUsername = result.username; + _tempUserId = result.userId; + _tempMfaSessionToken = result.mfaSessionToken; + 
_showMfaField = true; + _isLoading = false; + _errorMessage = 'Please enter your MFA code'; + }); + } else { + setState(() { + _errorMessage = result.errorMessage ?? 'Login failed'; + _isLoading = false; + }); + } } } catch (e) { setState(() { @@ -123,13 +390,29 @@ class _PinepodsStartupLoginState extends State { setState(() { _showMfaField = false; _tempServerUrl = null; - _tempApiKey = null; + _tempUsername = null; _tempUserId = null; + _tempMfaSessionToken = null; _mfaController.clear(); _errorMessage = ''; }); } + /// Parse hex color string to Color object + Color _parseColor(String hexColor) { + try { + final hex = hexColor.replaceAll('#', ''); + if (hex.length == 6) { + return Color(int.parse('FF$hex', radix: 16)); + } else if (hex.length == 8) { + return Color(int.parse(hex, radix: 16)); + } + } catch (e) { + // Fallback to default color on parsing error + } + return Theme.of(context).primaryColor; + } + @override Widget build(BuildContext context) { @@ -163,37 +446,39 @@ class _PinepodsStartupLoginState extends State { crossAxisAlignment: CrossAxisAlignment.stretch, children: [ // App Logo/Title - Container( - width: 80, - height: 80, - decoration: BoxDecoration( - borderRadius: BorderRadius.circular(16), - boxShadow: [ - BoxShadow( - color: Colors.black.withOpacity(0.1), - blurRadius: 8, - offset: const Offset(0, 4), + Center( + child: Container( + width: 80, + height: 80, + decoration: BoxDecoration( + borderRadius: BorderRadius.circular(16), + boxShadow: [ + BoxShadow( + color: Colors.black.withOpacity(0.1), + blurRadius: 8, + offset: const Offset(0, 4), + ), + ], + ), + child: ClipRRect( + borderRadius: BorderRadius.circular(16), + child: Image.asset( + 'assets/images/favicon.png', + fit: BoxFit.contain, + errorBuilder: (context, error, stackTrace) { + return Container( + decoration: BoxDecoration( + color: Theme.of(context).primaryColor, + borderRadius: BorderRadius.circular(16), + ), + child: Icon( + Icons.headset, + size: 48, + color: Colors.white, + 
), + ); + }, ), - ], - ), - child: ClipRRect( - borderRadius: BorderRadius.circular(16), - child: Image.asset( - 'assets/images/favicon.png', - fit: BoxFit.cover, - errorBuilder: (context, error, stackTrace) { - return Container( - decoration: BoxDecoration( - color: Theme.of(context).primaryColor, - borderRadius: BorderRadius.circular(16), - ), - child: Icon( - Icons.headset, - size: 48, - color: Colors.white, - ), - ); - }, ), ), ), @@ -240,22 +525,30 @@ class _PinepodsStartupLoginState extends State { const SizedBox(height: 16), // Username Field - TextFormField( - controller: _usernameController, - decoration: InputDecoration( - labelText: 'Username', - prefixIcon: const Icon(Icons.person), - border: OutlineInputBorder( - borderRadius: BorderRadius.circular(12), - ), - ), - validator: (value) { - if (value == null || value.isEmpty) { - return 'Please enter your username'; + Focus( + onFocusChange: (hasFocus) { + if (hasFocus) { + // User focused on username field, retry OIDC check if needed + _retryOidcCheck(); } - return null; }, - textInputAction: TextInputAction.next, + child: TextFormField( + controller: _usernameController, + decoration: InputDecoration( + labelText: 'Username', + prefixIcon: const Icon(Icons.person), + border: OutlineInputBorder( + borderRadius: BorderRadius.circular(12), + ), + ), + validator: (value) { + if (value == null || value.isEmpty) { + return 'Please enter your username'; + } + return null; + }, + textInputAction: TextInputAction.next, + ), ), const SizedBox(height: 16), @@ -371,6 +664,75 @@ class _PinepodsStartupLoginState extends State { const SizedBox(height: 16), + // OIDC Providers Section + if (_oidcProviders.isNotEmpty && !_showMfaField) ...[ + // Divider + Row( + children: [ + const Expanded(child: Divider()), + Padding( + padding: const EdgeInsets.symmetric(horizontal: 16), + child: Text( + 'Or continue with', + style: Theme.of(context).textTheme.bodySmall?.copyWith( + color: Colors.grey[600], + ), + ), + ), + const 
Expanded(child: Divider()), + ], + ), + const SizedBox(height: 16), + + // OIDC Provider Buttons + ..._oidcProviders.map((provider) => Padding( + padding: const EdgeInsets.only(bottom: 8), + child: SizedBox( + width: double.infinity, + child: ElevatedButton( + onPressed: _isLoading ? null : () => _handleOidcLogin(provider), + style: ElevatedButton.styleFrom( + backgroundColor: _parseColor(provider.buttonColorHex), + foregroundColor: _parseColor(provider.buttonTextColorHex), + padding: const EdgeInsets.all(16), + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(12), + ), + ), + child: Row( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + if (provider.iconSvg != null && provider.iconSvg!.isNotEmpty) + Container( + width: 20, + height: 20, + margin: const EdgeInsets.only(right: 8), + child: const Icon(Icons.account_circle, size: 20), + ), + Text( + provider.displayText, + style: const TextStyle(fontSize: 16), + ), + ], + ), + ), + ), + )), + + const SizedBox(height: 16), + ], + + // Loading indicator for OIDC discovery + if (_isLoadingOidc) ...[ + const SizedBox( + height: 20, + width: 20, + child: CircularProgressIndicator(strokeWidth: 2), + ), + const SizedBox(height: 16), + ], + + // Additional Info Text( 'Don\'t have a PinePods server? 
Visit pinepods.online to learn more.', @@ -391,12 +753,25 @@ class _PinepodsStartupLoginState extends State { ); } + /// Handle login success from any source (traditional or OIDC) + void _handleLoginSuccess() { + if (mounted) { + widget.onLoginSuccess?.call(); + } + } + @override void dispose() { + _oidcCheckTimer?.cancel(); + _serverController.removeListener(_onServerUrlChanged); _serverController.dispose(); _usernameController.dispose(); _passwordController.dispose(); _mfaController.dispose(); + + // Clear global callback to prevent memory leaks + AuthNotifier.clearGlobalLoginSuccessCallback(); + super.dispose(); } } \ No newline at end of file diff --git a/mobile/lib/ui/debug/debug_logs_page.dart b/mobile/lib/ui/debug/debug_logs_page.dart new file mode 100644 index 00000000..bdf0670e --- /dev/null +++ b/mobile/lib/ui/debug/debug_logs_page.dart @@ -0,0 +1,656 @@ +// lib/ui/debug/debug_logs_page.dart +import 'dart:io'; +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'package:url_launcher/url_launcher.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; + +class DebugLogsPage extends StatefulWidget { + const DebugLogsPage({Key? key}) : super(key: key); + + @override + State createState() => _DebugLogsPageState(); +} + +class _DebugLogsPageState extends State { + final AppLogger _logger = AppLogger(); + final ScrollController _scrollController = ScrollController(); + List _logs = []; + LogLevel? 
_selectedLevel; + bool _showDeviceInfo = true; + List _sessionFiles = []; + bool _hasPreviousCrash = false; + + @override + void initState() { + super.initState(); + _loadLogs(); + _loadSessionFiles(); + } + + void _loadLogs() { + setState(() { + if (_selectedLevel == null) { + _logs = _logger.logs; + } else { + _logs = _logger.getLogsByLevel(_selectedLevel!); + } + }); + } + + Future _copyLogsToClipboard() async { + try { + final formattedLogs = _logger.getFormattedLogs(); + await Clipboard.setData(ClipboardData(text: formattedLogs)); + + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('Logs copied to clipboard!'), + backgroundColor: Colors.green, + duration: Duration(seconds: 2), + ), + ); + } + } catch (e) { + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Failed to copy logs: $e'), + backgroundColor: Colors.red, + duration: const Duration(seconds: 3), + ), + ); + } + } + } + + Future _loadSessionFiles() async { + try { + final files = await _logger.getSessionFiles(); + final hasCrash = await _logger.hasPreviousCrash(); + setState(() { + _sessionFiles = files; + _hasPreviousCrash = hasCrash; + }); + } catch (e) { + print('Failed to load session files: $e'); + } + } + + Future _copyCurrentSessionToClipboard() async { + try { + final formattedLogs = _logger.getFormattedLogsWithSessionInfo(); + await Clipboard.setData(ClipboardData(text: formattedLogs)); + + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('Current session logs copied to clipboard!'), + backgroundColor: Colors.green, + duration: Duration(seconds: 2), + ), + ); + } + } catch (e) { + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Failed to copy logs: $e'), + backgroundColor: Colors.red, + duration: const Duration(seconds: 3), + ), + ); + } + } + } + + Future _copySessionFileToClipboard(File sessionFile) async { + try { + 
final content = await sessionFile.readAsString(); + final deviceInfo = _logger.deviceInfo?.formattedInfo ?? 'Device info not available'; + final formattedContent = '$deviceInfo\n\n${'=' * 50}\nSession File: ${sessionFile.path.split('/').last}\n${'=' * 50}\n\n$content'; + + await Clipboard.setData(ClipboardData(text: formattedContent)); + + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Session ${sessionFile.path.split('/').last} copied to clipboard!'), + backgroundColor: Colors.green, + duration: const Duration(seconds: 2), + ), + ); + } + } catch (e) { + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Failed to copy session file: $e'), + backgroundColor: Colors.red, + duration: const Duration(seconds: 3), + ), + ); + } + } + } + + Future _copyCrashLogToClipboard() async { + try { + final crashPath = _logger.crashLogPath; + if (crashPath == null) { + throw Exception('Crash log path not available'); + } + + final crashFile = File(crashPath); + final content = await crashFile.readAsString(); + + await Clipboard.setData(ClipboardData(text: content)); + + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('Crash log copied to clipboard!'), + backgroundColor: Colors.orange, + duration: Duration(seconds: 2), + ), + ); + } + } catch (e) { + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Failed to copy crash log: $e'), + backgroundColor: Colors.red, + duration: const Duration(seconds: 3), + ), + ); + } + } + } + + Future _openBugTracker() async { + const url = 'https://github.com/madeofpendletonwool/pinepods/issues'; + try { + final uri = Uri.parse(url); + await launchUrl(uri, mode: LaunchMode.externalApplication); + } catch (e) { + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Could not open bug tracker: $e'), + backgroundColor: Colors.red, + ), + ); + } + } + 
} + + void _clearLogs() { + showDialog( + context: context, + builder: (context) => AlertDialog( + title: const Text('Clear Logs'), + content: const Text('Are you sure you want to clear all logs? This action cannot be undone.'), + actions: [ + TextButton( + onPressed: () => Navigator.of(context).pop(), + child: const Text('Cancel'), + ), + TextButton( + onPressed: () { + _logger.clearLogs(); + _loadLogs(); + Navigator.of(context).pop(); + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('Logs cleared'), + backgroundColor: Colors.orange, + ), + ); + }, + child: const Text('Clear'), + ), + ], + ), + ); + } + + void _scrollToBottom() { + if (_scrollController.hasClients) { + _scrollController.animateTo( + _scrollController.position.maxScrollExtent, + duration: const Duration(milliseconds: 300), + curve: Curves.easeOut, + ); + } + } + + Color _getLevelColor(LogLevel level) { + switch (level) { + case LogLevel.debug: + return Colors.grey; + case LogLevel.info: + return Colors.blue; + case LogLevel.warning: + return Colors.orange; + case LogLevel.error: + return Colors.red; + case LogLevel.critical: + return Colors.purple; + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: const Text('Debug Logs'), + elevation: 0, + actions: [ + PopupMenuButton( + onSelected: (value) { + switch (value) { + case 'filter': + _showFilterDialog(); + break; + case 'clear': + _clearLogs(); + break; + case 'refresh': + _loadLogs(); + break; + case 'scroll_bottom': + _scrollToBottom(); + break; + } + }, + itemBuilder: (context) => [ + const PopupMenuItem( + value: 'filter', + child: Row( + children: [ + Icon(Icons.filter_list), + SizedBox(width: 8), + Text('Filter'), + ], + ), + ), + const PopupMenuItem( + value: 'refresh', + child: Row( + children: [ + Icon(Icons.refresh), + SizedBox(width: 8), + Text('Refresh'), + ], + ), + ), + const PopupMenuItem( + value: 'scroll_bottom', + child: Row( + children: [ + 
Icon(Icons.vertical_align_bottom), + SizedBox(width: 8), + Text('Scroll to Bottom'), + ], + ), + ), + const PopupMenuItem( + value: 'clear', + child: Row( + children: [ + Icon(Icons.clear_all), + SizedBox(width: 8), + Text('Clear Logs'), + ], + ), + ), + ], + ), + ], + ), + body: Column( + children: [ + // Header with device info toggle and stats + Container( + padding: const EdgeInsets.all(16.0), + color: Theme.of(context).cardColor, + child: Column( + children: [ + Row( + children: [ + Expanded( + child: Text( + 'Total Entries: ${_logs.length}', + style: Theme.of(context).textTheme.titleMedium, + ), + ), + if (_selectedLevel != null) + Chip( + label: Text(_selectedLevel!.name.toUpperCase()), + backgroundColor: _getLevelColor(_selectedLevel!).withOpacity(0.2), + deleteIcon: const Icon(Icons.close, size: 16), + onDeleted: () { + setState(() { + _selectedLevel = null; + }); + _loadLogs(); + }, + ), + ], + ), + const SizedBox(height: 8), + Row( + children: [ + Expanded( + child: ElevatedButton.icon( + onPressed: _copyCurrentSessionToClipboard, + icon: const Icon(Icons.copy), + label: const Text('Copy Current'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.green, + foregroundColor: Colors.white, + ), + ), + ), + const SizedBox(width: 8), + Expanded( + child: ElevatedButton.icon( + onPressed: _openBugTracker, + icon: const Icon(Icons.bug_report), + label: const Text('Report Bug'), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.orange, + foregroundColor: Colors.white, + ), + ), + ), + ], + ), + ], + ), + ), + const Divider(height: 1), + + // Session Files Section + if (_sessionFiles.isNotEmpty || _hasPreviousCrash) + ExpansionTile( + title: const Text('Session Files & Crash Logs'), + leading: const Icon(Icons.folder), + initiallyExpanded: false, + children: [ + if (_hasPreviousCrash) + ListTile( + leading: const Icon(Icons.warning, color: Colors.red), + title: const Text('Previous Crash Log'), + subtitle: const Text('Tap to copy crash log 
to clipboard'), + trailing: IconButton( + icon: const Icon(Icons.copy), + onPressed: _copyCrashLogToClipboard, + ), + onTap: _copyCrashLogToClipboard, + ), + ..._sessionFiles.map((file) { + final fileName = file.path.split('/').last; + final isCurrentSession = fileName.contains(_logger.currentSessionPath?.split('/').last?.replaceFirst('session_', '').replaceFirst('.log', '') ?? ''); + + return ListTile( + leading: Icon( + isCurrentSession ? Icons.play_circle : Icons.history, + color: isCurrentSession ? Colors.green : Colors.grey, + ), + title: Text(fileName), + subtitle: Text( + 'Modified: ${file.lastModifiedSync().toString().substring(0, 16)}${isCurrentSession ? ' (Current)' : ''}', + style: TextStyle( + fontSize: 12, + color: isCurrentSession ? Colors.green : Colors.grey[600], + ), + ), + trailing: IconButton( + icon: const Icon(Icons.copy), + onPressed: () => _copySessionFileToClipboard(file), + ), + onTap: () => _copySessionFileToClipboard(file), + ); + }).toList(), + if (_sessionFiles.isEmpty && !_hasPreviousCrash) + const Padding( + padding: EdgeInsets.all(16.0), + child: Text( + 'No session files available yet', + style: TextStyle(color: Colors.grey), + ), + ), + ], + ), + + // Device info section (collapsible) + if (_showDeviceInfo && _logger.deviceInfo != null) + ExpansionTile( + title: const Text('Device Information'), + leading: const Icon(Icons.phone_android), + initiallyExpanded: false, + children: [ + Padding( + padding: const EdgeInsets.all(16.0), + child: Container( + width: double.infinity, + padding: const EdgeInsets.all(12.0), + decoration: BoxDecoration( + color: Theme.of(context).cardColor, + borderRadius: BorderRadius.circular(8), + border: Border.all(color: Colors.grey.withOpacity(0.3)), + ), + child: Text( + _logger.deviceInfo!.formattedInfo, + style: const TextStyle(fontFamily: 'monospace', fontSize: 12), + ), + ), + ), + ], + ), + + // Logs list + Expanded( + child: _logs.isEmpty + ? 
const Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon(Icons.inbox_outlined, size: 64, color: Colors.grey), + SizedBox(height: 16), + Text( + 'No logs found', + style: TextStyle(fontSize: 18, color: Colors.grey), + ), + SizedBox(height: 8), + Text( + 'Use the app to generate logs', + style: TextStyle(color: Colors.grey), + ), + ], + ), + ) + : ListView.builder( + controller: _scrollController, + itemCount: _logs.length, + itemBuilder: (context, index) { + final log = _logs[index]; + return _buildLogEntry(log); + }, + ), + ), + ], + ), + floatingActionButton: _logs.isNotEmpty + ? FloatingActionButton( + onPressed: _scrollToBottom, + tooltip: 'Scroll to bottom', + child: const Icon(Icons.vertical_align_bottom), + ) + : null, + ); + } + + Widget _buildLogEntry(LogEntry log) { + final levelColor = _getLevelColor(log.level); + + return Container( + margin: const EdgeInsets.symmetric(horizontal: 8, vertical: 2), + child: Card( + elevation: 1, + child: ExpansionTile( + leading: Container( + width: 8, + height: 8, + decoration: BoxDecoration( + color: levelColor, + shape: BoxShape.circle, + ), + ), + title: Text( + log.message, + style: const TextStyle(fontSize: 14), + maxLines: 2, + overflow: TextOverflow.ellipsis, + ), + subtitle: Text( + '${log.timestamp.toString().substring(0, 19)} • ${log.levelString} • ${log.tag}', + style: TextStyle( + fontSize: 12, + color: Colors.grey[600], + ), + ), + children: [ + Padding( + padding: const EdgeInsets.all(16.0), + child: Container( + width: double.infinity, + padding: const EdgeInsets.all(12.0), + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.surfaceVariant.withOpacity(0.3), + borderRadius: BorderRadius.circular(8), + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + SelectableText( + log.formattedMessage, + style: const TextStyle( + fontFamily: 'monospace', + fontSize: 12, + ), + ), + if (log.stackTrace != null && 
log.stackTrace!.isNotEmpty) ...[ + const SizedBox(height: 8), + const Divider(), + const SizedBox(height: 8), + const Text( + 'Stack Trace:', + style: TextStyle(fontWeight: FontWeight.bold), + ), + const SizedBox(height: 4), + SelectableText( + log.stackTrace!, + style: const TextStyle( + fontFamily: 'monospace', + fontSize: 10, + ), + ), + ], + const SizedBox(height: 8), + Row( + mainAxisAlignment: MainAxisAlignment.end, + children: [ + TextButton.icon( + onPressed: () { + Clipboard.setData(ClipboardData(text: log.formattedMessage)); + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar(content: Text('Log entry copied to clipboard')), + ); + }, + icon: const Icon(Icons.copy, size: 16), + label: const Text('Copy'), + ), + ], + ), + ], + ), + ), + ), + ], + ), + ), + ); + } + + void _showFilterDialog() { + showDialog( + context: context, + builder: (context) => AlertDialog( + title: const Text('Filter Logs'), + content: Column( + mainAxisSize: MainAxisSize.min, + children: [ + const Text('Show only logs of level:'), + const SizedBox(height: 16), + ...LogLevel.values.map((level) => RadioListTile( + title: Row( + children: [ + Container( + width: 12, + height: 12, + decoration: BoxDecoration( + color: _getLevelColor(level), + shape: BoxShape.circle, + ), + ), + const SizedBox(width: 8), + Text(level.name.toUpperCase()), + ], + ), + value: level, + groupValue: _selectedLevel, + onChanged: (value) { + setState(() { + _selectedLevel = value; + }); + }, + )), + RadioListTile( + title: const Text('All Levels'), + value: null, + groupValue: _selectedLevel, + onChanged: (value) { + setState(() { + _selectedLevel = null; + }); + }, + ), + ], + ), + actions: [ + TextButton( + onPressed: () => Navigator.of(context).pop(), + child: const Text('Cancel'), + ), + ElevatedButton( + onPressed: () { + _loadLogs(); + Navigator.of(context).pop(); + }, + child: const Text('Apply'), + ), + ], + ), + ); + } + + @override + void dispose() { + _scrollController.dispose(); + 
super.dispose(); + } +} \ No newline at end of file diff --git a/mobile/lib/ui/pinepods/downloads.dart b/mobile/lib/ui/pinepods/downloads.dart index eb0efb79..2c6f1f98 100644 --- a/mobile/lib/ui/pinepods/downloads.dart +++ b/mobile/lib/ui/pinepods/downloads.dart @@ -13,6 +13,8 @@ import 'package:pinepods_mobile/ui/widgets/episode_tile.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; import 'package:pinepods_mobile/ui/widgets/paginated_episode_list.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; +import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; import 'package:provider/provider.dart'; import 'package:logging/logging.dart'; @@ -268,6 +270,7 @@ class _PinepodsDownloadsState extends State { setState(() { _serverDownloads.removeWhere((e) => e.episodeId == episode.episodeId); _serverDownloadsByPodcast = _groupEpisodesByPodcast(_serverDownloads); + _filterDownloads(); // Update filtered lists after removal }); } else { _showErrorSnackBar('Failed to delete episode from server'); @@ -437,7 +440,7 @@ class _PinepodsDownloadsState extends State { : null, onPlayPressed: isServerDownload ? 
(episode) => _playServerEpisode(episode) - : null, + : (episode) => _playLocalEpisode(episode), ), ], ), @@ -503,38 +506,44 @@ class _PinepodsDownloadsState extends State { ); } + // Update filtered local downloads when local downloads change + _filterLocalDownloads(currentLocalDownloadsByPodcast); + if (_errorMessage != null) { - return SliverFillRemaining( - hasScrollBody: false, - child: Center( - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - size: 64, - color: Colors.red[300], - ), - const SizedBox(height: 16), - Text( - _errorMessage!, - textAlign: TextAlign.center, - style: Theme.of(context).textTheme.bodyLarge, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _loadDownloads, - child: const Text('Retry'), - ), - ], + // Check if this is a server connection error - show offline mode for downloads + if (_errorMessage!.isServerConnectionError) { + // Show offline downloads only with special UI + return _buildOfflineDownloadsView(_filteredLocalDownloadsByPodcast); + } else { + return SliverFillRemaining( + hasScrollBody: false, + child: Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon( + Icons.error_outline, + size: 64, + color: Colors.red[300], + ), + const SizedBox(height: 16), + Text( + _errorMessage!.userFriendlyMessage, + textAlign: TextAlign.center, + style: Theme.of(context).textTheme.bodyLarge, + ), + const SizedBox(height: 16), + ElevatedButton( + onPressed: _loadDownloads, + child: const Text('Retry'), + ), + ], + ), ), - ), - ); + ); + } } - // Update filtered local downloads when local downloads change - _filterLocalDownloads(currentLocalDownloadsByPodcast); - if (_filteredLocalDownloadsByPodcast.isEmpty && _filteredServerDownloadsByPodcast.isEmpty) { if (_searchQuery.isNotEmpty) { // Show no search results message @@ -727,4 +736,233 @@ class _PinepodsDownloadsState extends State { _showErrorSnackBar('Server episode playback not 
yet implemented'); } + + Future _playLocalEpisode(Episode episode) async { + try { + log.info('Playing local episode: ${episode.title}'); + + final audioPlayerService = Provider.of(context, listen: false); + + // Use the regular audio player service for offline playback + // This bypasses the PinePods service and server dependencies + await audioPlayerService.playEpisode(episode: episode, resume: true); + + log.info('Successfully started local episode playback'); + } catch (e) { + log.severe('Error playing local episode: $e'); + _showErrorSnackBar('Failed to play episode: $e'); + } + } + + Widget _buildOfflinePodcastDropdown(String podcastKey, List episodes, {String? displayName}) { + final isExpanded = _expandedPodcasts.contains(podcastKey); + final title = displayName ?? podcastKey; + + return Card( + margin: const EdgeInsets.symmetric(horizontal: 12.0, vertical: 4.0), + child: Column( + children: [ + ListTile( + leading: Icon( + Icons.offline_pin, + color: Colors.green[700], + ), + title: Text( + title, + style: const TextStyle(fontWeight: FontWeight.bold), + ), + subtitle: Text( + '${episodes.length} episode${episodes.length != 1 ? 's' : ''} available offline' + ), + trailing: Row( + mainAxisSize: MainAxisSize.min, + children: [ + Container( + padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 2), + decoration: BoxDecoration( + color: Colors.green[100], + borderRadius: BorderRadius.circular(8), + ), + child: Text( + 'Offline', + style: TextStyle( + fontSize: 10, + color: Colors.green[700], + fontWeight: FontWeight.w500, + ), + ), + ), + const SizedBox(width: 8), + Icon( + isExpanded ? 
Icons.expand_less : Icons.expand_more, + ), + ], + ), + onTap: () => _togglePodcastExpansion(podcastKey), + ), + if (isExpanded) + PaginatedEpisodeList( + episodes: episodes, + isServerEpisodes: false, + isOfflineMode: true, + onPlayPressed: (episode) => _playLocalEpisode(episode), + ), + ], + ), + ); + } + + Widget _buildOfflineDownloadsView(Map> localDownloadsByPodcast) { + return MultiSliver( + children: [ + // Offline banner + SliverToBoxAdapter( + child: Container( + width: double.infinity, + padding: const EdgeInsets.all(16.0), + margin: const EdgeInsets.all(12.0), + decoration: BoxDecoration( + color: Colors.orange[100], + border: Border.all(color: Colors.orange[300]!), + borderRadius: BorderRadius.circular(8), + ), + child: Row( + children: [ + Icon( + Icons.cloud_off, + color: Colors.orange[800], + size: 24, + ), + const SizedBox(width: 12), + Expanded( + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + 'Offline Mode', + style: TextStyle( + fontWeight: FontWeight.bold, + color: Colors.orange[800], + fontSize: 16, + ), + ), + const SizedBox(height: 4), + Text( + 'Server unavailable. 
Showing local downloads only.', + style: TextStyle( + color: Colors.orange[700], + fontSize: 14, + ), + ), + ], + ), + ), + const SizedBox(width: 12), + ElevatedButton.icon( + onPressed: () { + setState(() { + _errorMessage = null; + }); + _loadDownloads(); + }, + icon: Icon( + Icons.refresh, + size: 16, + color: Colors.orange[800], + ), + label: Text( + 'Retry', + style: TextStyle( + color: Colors.orange[800], + fontSize: 12, + fontWeight: FontWeight.w500, + ), + ), + style: ElevatedButton.styleFrom( + backgroundColor: Colors.orange[50], + elevation: 0, + padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6), + minimumSize: Size.zero, + tapTargetSize: MaterialTapTargetSize.shrinkWrap, + ), + ), + ], + ), + ), + ), + + // Search bar for filtering local downloads + _buildSearchBar(), + + // Local downloads content + if (localDownloadsByPodcast.isEmpty) + SliverFillRemaining( + hasScrollBody: false, + child: Center( + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + children: [ + Icon( + Icons.cloud_off, + size: 64, + color: Colors.grey[400], + ), + const SizedBox(height: 16), + Text( + 'No local downloads', + style: Theme.of(context).textTheme.headlineSmall, + ), + const SizedBox(height: 8), + Text( + 'Download episodes while online to access them here', + style: Theme.of(context).textTheme.bodyMedium, + textAlign: TextAlign.center, + ), + ], + ), + ), + ) + else + SliverList( + delegate: SliverChildListDelegate([ + // Local downloads header + Padding( + padding: const EdgeInsets.fromLTRB(16, 16, 16, 8), + child: Row( + children: [ + Icon(Icons.smartphone, color: Colors.green[600]), + const SizedBox(width: 8), + Text( + _searchQuery.isEmpty + ? 
'Local Downloads' + : 'Local Downloads (${_countFilteredEpisodes(localDownloadsByPodcast)})', + style: Theme.of(context).textTheme.titleLarge?.copyWith( + fontWeight: FontWeight.bold, + color: Colors.green[600], + ), + ), + ], + ), + ), + + // Local downloads by podcast + ...localDownloadsByPodcast.entries.map((entry) { + final podcastName = entry.key; + final episodes = entry.value; + final podcastKey = 'offline_local_$podcastName'; + + return _buildOfflinePodcastDropdown( + podcastKey, + episodes, + displayName: podcastName, + ); + }).toList(), + + // Bottom padding + const SizedBox(height: 100), + ]), + ), + ], + ); + } } \ No newline at end of file diff --git a/mobile/lib/ui/pinepods/episode_details.dart b/mobile/lib/ui/pinepods/episode_details.dart index fc75f8d1..0fb271a5 100644 --- a/mobile/lib/ui/pinepods/episode_details.dart +++ b/mobile/lib/ui/pinepods/episode_details.dart @@ -4,6 +4,7 @@ import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; +import 'package:pinepods_mobile/services/audio/default_audio_player_service.dart'; import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/entities/person.dart'; @@ -12,6 +13,9 @@ import 'package:pinepods_mobile/ui/widgets/episode_description.dart'; import 'package:pinepods_mobile/ui/widgets/podcast_image.dart'; import 'package:pinepods_mobile/ui/pinepods/podcast_details.dart'; import 'package:pinepods_mobile/ui/podcast/mini_player.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/utils/local_download_utils.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:provider/provider.dart'; 
import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; @@ -29,33 +33,62 @@ class PinepodsEpisodeDetails extends StatefulWidget { class _PinepodsEpisodeDetailsState extends State { final PinepodsService _pinepodsService = PinepodsService(); - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance PinepodsEpisode? _episode; bool _isLoading = true; String _errorMessage = ''; List _persons = []; + bool _isDownloadedLocally = false; @override void initState() { super.initState(); _episode = widget.initialEpisode; _loadEpisodeDetails(); + _checkLocalDownloadStatus(); } - void _initializeAudioService() { - if (_audioService != null) return; + PinepodsAudioService? get _audioService => GlobalServices.pinepodsAudioService; + + Future _checkLocalDownloadStatus() async { + if (_episode == null) return; - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, + final isDownloaded = await LocalDownloadUtils.isEpisodeDownloadedLocally(context, _episode!); + + if (mounted) { + setState(() { + _isDownloadedLocally = isDownloaded; + }); + } + } + + Future _localDownloadEpisode() async { + if (_episode == null) return; + + final success = await LocalDownloadUtils.localDownloadEpisode(context, _episode!); + + if (success) { + LocalDownloadUtils.showSnackBar(context, 'Episode download started', Colors.green); + await _checkLocalDownloadStatus(); // Update button state + } else { + LocalDownloadUtils.showSnackBar(context, 'Failed to start download', Colors.red); + } + } + + Future _deleteLocalDownload() async { + if (_episode == null) return; + + final deletedCount = await LocalDownloadUtils.deleteLocalDownload(context, _episode!); + + if (deletedCount > 0) { + LocalDownloadUtils.showSnackBar( + context, + 'Deleted $deletedCount local 
download${deletedCount > 1 ? 's' : ''}', + Colors.orange ); - } catch (e) { - // Provider not available - audio service will remain null + await _checkLocalDownloadStatus(); // Update button state + } else { + LocalDownloadUtils.showSnackBar(context, 'Local download not found', Colors.red); } } @@ -80,6 +113,7 @@ class _PinepodsEpisodeDetailsState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); final userId = settings.pinepodsUserId!; final episodeDetails = await _pinepodsService.getEpisodeMetadata( @@ -157,7 +191,6 @@ class _PinepodsEpisodeDetailsState extends State { } Future _togglePlayPause() async { - _initializeAudioService(); if (_audioService == null) { _showSnackBar('Audio service not available', Colors.red); @@ -184,8 +217,10 @@ class _PinepodsEpisodeDetailsState extends State { } } else { // Start playing this episode - await _audioService!.playPinepodsEpisode( - pinepodsEpisode: _episode!, + await playPinepodsEpisodeWithOptionalFullScreen( + context, + _audioService!, + _episode!, resume: _episode!.isStarted, ); } @@ -195,7 +230,6 @@ class _PinepodsEpisodeDetailsState extends State { } Future _handleTimestampTap(Duration timestamp) async { - _initializeAudioService(); if (_audioService == null) { _showSnackBar('Audio service not available', Colors.red); @@ -212,8 +246,10 @@ class _PinepodsEpisodeDetailsState extends State { if (!isCurrentEpisode) { // Start playing the episode first - await _audioService!.playPinepodsEpisode( - pinepodsEpisode: _episode!, + await playPinepodsEpisodeWithOptionalFullScreen( + context, + _audioService!, + _episode!, resume: false, // Start from beginning initially ); @@ -482,22 +518,33 @@ class _PinepodsEpisodeDetailsState extends State { } try { - // Create a minimal podcast object using data from episode - same pattern as podcast tile + final settingsBloc = Provider.of(context, 
listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + // Fetch the actual podcast details to get correct episode count + final podcastDetails = await _pinepodsService.getPodcastDetailsById(_episode!.podcastId!, userId); + final podcast = UnifiedPinepodsPodcast( id: _episode!.podcastId!, indexId: 0, title: _episode!.podcastName, - url: '', // Will be loaded by the details page - originalUrl: '', - link: '', - description: '', - author: '', - ownerName: '', - image: _episode!.episodeArtwork, - artwork: _episode!.episodeArtwork, + url: podcastDetails?['feedurl'] ?? '', + originalUrl: podcastDetails?['feedurl'] ?? '', + link: podcastDetails?['websiteurl'] ?? '', + description: podcastDetails?['description'] ?? '', + author: podcastDetails?['author'] ?? '', + ownerName: podcastDetails?['author'] ?? '', + image: podcastDetails?['artworkurl'] ?? _episode!.episodeArtwork, + artwork: podcastDetails?['artworkurl'] ?? _episode!.episodeArtwork, lastUpdateTime: 0, - explicit: false, - episodeCount: 0, + explicit: podcastDetails?['explicit'] ?? false, + episodeCount: podcastDetails?['episodecount'] ?? 0, ); // Navigate to podcast details - same as podcast tile does @@ -794,6 +841,29 @@ class _PinepodsEpisodeDetailsState extends State { ), ], ), + + const SizedBox(height: 8), + + // Third row: Local Download (full width) + Row( + children: [ + Expanded( + child: OutlinedButton.icon( + onPressed: _isDownloadedLocally ? _deleteLocalDownload : _localDownloadEpisode, + icon: Icon( + _isDownloadedLocally ? Icons.delete_forever_outlined : Icons.file_download_outlined, + color: _isDownloadedLocally ? Colors.red : Colors.green, + ), + label: Text(_isDownloadedLocally ? 'Delete Local Download' : 'Download Locally'), + style: OutlinedButton.styleFrom( + side: BorderSide( + color: _isDownloadedLocally ? 
Colors.red : Colors.green, + ), + ), + ), + ), + ], + ), ], ), @@ -887,7 +957,7 @@ class _PinepodsEpisodeDetailsState extends State { @override void dispose() { - _audioService?.dispose(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } } \ No newline at end of file diff --git a/mobile/lib/ui/pinepods/episode_search.dart b/mobile/lib/ui/pinepods/episode_search.dart index 7cdf99d0..becd910f 100644 --- a/mobile/lib/ui/pinepods/episode_search.dart +++ b/mobile/lib/ui/pinepods/episode_search.dart @@ -5,8 +5,11 @@ import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; +import 'package:pinepods_mobile/services/search_history_service.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; +import 'package:pinepods_mobile/ui/widgets/paginated_episode_list.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; import 'package:provider/provider.dart'; @@ -23,18 +26,20 @@ class EpisodeSearchPage extends StatefulWidget { class _EpisodeSearchPageState extends State with TickerProviderStateMixin { final PinepodsService _pinepodsService = PinepodsService(); + final SearchHistoryService _searchHistoryService = SearchHistoryService(); final TextEditingController _searchController = TextEditingController(); final FocusNode _focusNode = FocusNode(); Timer? _debounceTimer; List _searchResults = []; + List _searchHistory = []; bool _isLoading = false; bool _hasSearched = false; + bool _showHistory = false; String? _errorMessage; String _currentQuery = ''; - // Audio service and context menu state - PinepodsAudioService? 
_audioService; + // Use global audio service instead of creating local instance int? _contextMenuEpisodeIndex; // Animation controllers @@ -87,32 +92,35 @@ class _EpisodeSearchPageState extends State with TickerProvid settings.pinepodsServer!, settings.pinepodsApiKey!, ); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); } _searchController.addListener(_onSearchChanged); + _loadSearchHistory(); } - void _initializeAudioService() { - if (_audioService != null) return; // Already initialized - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null + Future _loadSearchHistory() async { + final history = await _searchHistoryService.getEpisodeSearchHistory(); + if (mounted) { + setState(() { + _searchHistory = history; + }); } } + void _selectHistoryItem(String searchTerm) { + _searchController.text = searchTerm; + _performSearch(searchTerm); + } + + Future _removeHistoryItem(String searchTerm) async { + await _searchHistoryService.removeEpisodeSearchTerm(searchTerm); + await _loadSearchHistory(); + } + + PinepodsAudioService? get _audioService => GlobalServices.pinepodsAudioService; + Future _playEpisode(PinepodsEpisode episode) async { - // Try to initialize audio service if not already done - _initializeAudioService(); - if (_audioService == null) { ScaffoldMessenger.of(context).showSnackBar( const SnackBar( @@ -360,6 +368,10 @@ class _EpisodeSearchPageState extends State with TickerProvid void _onSearchChanged() { final query = _searchController.text.trim(); + setState(() { + _showHistory = query.isEmpty && _searchHistory.isNotEmpty; + }); + if (_debounceTimer?.isActive ?? 
false) { _debounceTimer!.cancel(); } @@ -378,8 +390,13 @@ class _EpisodeSearchPageState extends State with TickerProvid setState(() { _isLoading = true; _errorMessage = null; + _showHistory = false; }); + // Save search term to history + await _searchHistoryService.addEpisodeSearchTerm(query); + await _loadSearchHistory(); + // Animate search bar to top _slideAnimationController.forward(); @@ -418,6 +435,7 @@ class _EpisodeSearchPageState extends State with TickerProvid _hasSearched = false; _errorMessage = null; _currentQuery = ''; + _showHistory = _searchHistory.isNotEmpty; }); _fadeAnimationController.reset(); _slideAnimationController.reverse(); @@ -449,6 +467,11 @@ class _EpisodeSearchPageState extends State with TickerProvid controller: _searchController, focusNode: _focusNode, style: Theme.of(context).textTheme.bodyLarge, + onTap: () { + setState(() { + _showHistory = _searchController.text.isEmpty && _searchHistory.isNotEmpty; + }); + }, decoration: InputDecoration( hintText: 'Search for episodes...', hintStyle: Theme.of(context).textTheme.bodyMedium?.copyWith( @@ -467,6 +490,7 @@ class _EpisodeSearchPageState extends State with TickerProvid onPressed: () { _searchController.clear(); _clearResults(); + _focusNode.requestFocus(); }, ) : null, @@ -601,32 +625,99 @@ class _EpisodeSearchPageState extends State with TickerProvid } Widget _buildResults() { + // Convert search results to PinepodsEpisode objects + final episodes = _searchResults.map((result) => result.toPinepodsEpisode()).toList(); + return FadeTransition( opacity: _fadeAnimation, - child: ListView.builder( - shrinkWrap: true, - physics: const NeverScrollableScrollPhysics(), - itemCount: _searchResults.length, - itemBuilder: (context, index) { - final result = _searchResults[index]; - final episode = result.toPinepodsEpisode(); - - return PinepodsEpisodeCard( - episode: episode, - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - builder: (context) => PinepodsEpisodeDetails( - 
initialEpisode: episode, - ), - ), - ); - }, - onLongPress: () => _showContextMenu(index), - onPlayPressed: () => _playEpisode(episode), + child: PaginatedEpisodeList( + episodes: episodes, + isServerEpisodes: true, + pageSize: 20, // Show 20 episodes at a time for good performance + onEpisodeTap: (episode) { + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => PinepodsEpisodeDetails( + initialEpisode: episode, + ), + ), + ); + }, + onEpisodeLongPress: (episode, globalIndex) { + // Find the original index in _searchResults for context menu + final originalIndex = _searchResults.indexWhere( + (result) => result.episodeId == episode.episodeId ); + if (originalIndex != -1) { + _showContextMenu(originalIndex); + } }, + onPlayPressed: (episode) => _playEpisode(episode), + ), + ); + } + + Widget _buildSearchHistory() { + return Container( + margin: const EdgeInsets.symmetric(horizontal: 16), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Text( + 'Recent Searches', + style: Theme.of(context).textTheme.titleMedium?.copyWith( + color: Theme.of(context).primaryColor, + fontWeight: FontWeight.bold, + ), + ), + const Spacer(), + if (_searchHistory.isNotEmpty) + TextButton( + onPressed: () async { + await _searchHistoryService.clearEpisodeSearchHistory(); + await _loadSearchHistory(); + }, + child: Text( + 'Clear All', + style: TextStyle( + color: Theme.of(context).hintColor, + fontSize: 12, + ), + ), + ), + ], + ), + const SizedBox(height: 8), + ..._searchHistory.take(10).map((searchTerm) => Card( + margin: const EdgeInsets.symmetric(vertical: 2), + child: ListTile( + dense: true, + leading: Icon( + Icons.history, + color: Theme.of(context).hintColor, + size: 20, + ), + title: Text( + searchTerm, + style: Theme.of(context).textTheme.bodyMedium, + maxLines: 1, + overflow: TextOverflow.ellipsis, + ), + trailing: IconButton( + icon: Icon( + Icons.close, + size: 18, + color: Theme.of(context).hintColor, + 
), + onPressed: () => _removeHistoryItem(searchTerm), + ), + onTap: () => _selectHistoryItem(searchTerm), + ), + )).toList(), + ], ), ); } @@ -696,13 +787,15 @@ class _EpisodeSearchPageState extends State with TickerProvid child: SingleChildScrollView( child: AnimatedSwitcher( duration: const Duration(milliseconds: 300), - child: _isLoading - ? _buildLoadingIndicator() - : _errorMessage != null - ? _buildErrorState() - : _searchResults.isEmpty - ? _buildEmptyState() - : _buildResults(), + child: _showHistory + ? _buildSearchHistory() + : _isLoading + ? _buildLoadingIndicator() + : _errorMessage != null + ? _buildErrorState() + : _searchResults.isEmpty + ? _buildEmptyState() + : _buildResults(), ), ), ), diff --git a/mobile/lib/ui/pinepods/feed.dart b/mobile/lib/ui/pinepods/feed.dart index 07ac263f..f7bcd152 100644 --- a/mobile/lib/ui/pinepods/feed.dart +++ b/mobile/lib/ui/pinepods/feed.dart @@ -5,12 +5,20 @@ import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; +import 'package:pinepods_mobile/services/audio/default_audio_player_service.dart'; import 'package:pinepods_mobile/services/download/download_service.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/entities/episode.dart'; +import 'package:pinepods_mobile/entities/downloadable.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/utils/position_utils.dart'; +import 
'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:provider/provider.dart'; class PinepodsFeed extends StatefulWidget { @@ -26,8 +34,9 @@ class _PinepodsFeedState extends State { String _errorMessage = ''; List _episodes = []; final PinepodsService _pinepodsService = PinepodsService(); - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance int? _contextMenuEpisodeIndex; // Index of episode showing context menu + Map _localDownloadStatus = {}; // Cache for local download status @override void initState() { @@ -35,22 +44,13 @@ class _PinepodsFeedState extends State { _loadRecentEpisodes(); } - void _initializeAudioService() { - if (_audioService != null) return; // Already initialized - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - // This is fine, we'll handle it in the play method + PinepodsAudioService? 
get _audioService { + final service = GlobalServices.pinepodsAudioService; + if (service == null) { + final logger = AppLogger(); + logger.error('Feed', 'Global audio service is null - this should not happen'); } + return service; } Future _loadRecentEpisodes() async { @@ -73,18 +73,30 @@ class _PinepodsFeedState extends State { return; } - // Set credentials in the service + // Set credentials in both local and global services _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); // Use the stored user ID from login final userId = settings.pinepodsUserId!; final episodes = await _pinepodsService.getRecentEpisodes(userId); + // Enrich episodes with best available positions (local vs server) + final enrichedEpisodes = await PositionUtils.enrichEpisodesWithBestPositions( + context, + _pinepodsService, + episodes, + userId, + ); + setState(() { - _episodes = episodes; + _episodes = enrichedEpisodes; _isLoading = false; }); + + // After loading episodes, check their local download status + await _loadLocalDownloadStatuses(); } catch (e) { setState(() { _errorMessage = 'Failed to load recent episodes: ${e.toString()}'; @@ -93,15 +105,63 @@ class _PinepodsFeedState extends State { } } + // Proactively load local download status for all episodes + Future _loadLocalDownloadStatuses() async { + final logger = AppLogger(); + logger.debug('Feed', 'Loading local download statuses for ${_episodes.length} episodes'); + + try { + final podcastBloc = Provider.of(context, listen: false); + + // Get all downloaded episodes from repository + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + logger.debug('Feed', 'Found ${allEpisodes.length} total episodes in repository'); + + // Filter to PinePods episodes only and log them + final pinepodsEpisodes = allEpisodes.where((ep) => ep.guid.startsWith('pinepods_')).toList(); + 
logger.debug('Feed', 'Found ${pinepodsEpisodes.length} PinePods episodes in repository'); + + // Found pinepods episodes in repository + + // Now check each feed episode against the repository + for (final episode in _episodes) { + final guid = _generateEpisodeGuid(episode); + + // Look for episodes with either new format (pinepods_123) or old format (pinepods_123_timestamp) + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + // Checking for matching episodes + + // Consider downloaded if ANY matching episode is downloaded + final isDownloaded = matchingEpisodes.any((ep) => + ep.downloaded || ep.downloadState == DownloadState.downloaded + ); + + _localDownloadStatus[guid] = isDownloaded; + // Episode status checked + } + + // Download statuses cached + + } catch (e) { + logger.error('Feed', 'Error loading local download statuses', e.toString()); + } + } + Future _refresh() async { + // Clear local download status cache on refresh + _localDownloadStatus.clear(); await _loadRecentEpisodes(); } Future _playEpisode(PinepodsEpisode episode) async { - // Try to initialize audio service if not already done - _initializeAudioService(); + final logger = AppLogger(); + logger.info('Feed', 'Attempting to play episode: ${episode.episodeTitle}'); if (_audioService == null) { + logger.error('Feed', 'Audio service not available for episode: ${episode.episodeTitle}'); ScaffoldMessenger.of(context).showSnackBar( const SnackBar( content: Text('Audio service not available'), @@ -131,11 +191,15 @@ class _PinepodsFeedState extends State { ); // Start playing the episode with full PinePods integration - await _audioService!.playPinepodsEpisode( - pinepodsEpisode: episode, + await playPinepodsEpisodeWithOptionalFullScreen( + context, + _audioService!, + episode, resume: episode.isStarted, // Resume if episode was previously started ); + logger.info('Feed', 'Successfully started playing episode: 
${episode.episodeTitle}'); + // Show success message ScaffoldMessenger.of(context).showSnackBar( SnackBar( @@ -145,6 +209,8 @@ class _PinepodsFeedState extends State { ), ); } catch (e) { + logger.error('Feed', 'Failed to play episode: ${episode.episodeTitle}', e.toString()); + // Show error message ScaffoldMessenger.of(context).showSnackBar( SnackBar( @@ -156,10 +222,56 @@ class _PinepodsFeedState extends State { } } - void _showContextMenu(int episodeIndex) { - setState(() { - _contextMenuEpisodeIndex = episodeIndex; - }); + Future _showContextMenu(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final isDownloadedLocally = await _isEpisodeDownloadedLocally(episode); + + if (!mounted) return; + + showDialog( + context: context, + barrierColor: Colors.black.withOpacity(0.3), + builder: (context) => EpisodeContextMenu( + episode: episode, + isDownloadedLocally: isDownloadedLocally, + onSave: () { + Navigator.of(context).pop(); + _saveEpisode(episodeIndex); + }, + onRemoveSaved: () { + Navigator.of(context).pop(); + _removeSavedEpisode(episodeIndex); + }, + onDownload: episode.downloaded + ? 
() { + Navigator.of(context).pop(); + _deleteEpisode(episodeIndex); + } + : () { + Navigator.of(context).pop(); + _downloadEpisode(episodeIndex); + }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex); + }, + onDeleteLocalDownload: () { + Navigator.of(context).pop(); + _deleteLocalDownload(episodeIndex); + }, + onQueue: () { + Navigator.of(context).pop(); + _toggleQueueEpisode(episodeIndex); + }, + onMarkComplete: () { + Navigator.of(context).pop(); + _toggleMarkComplete(episodeIndex); + }, + onDismiss: () { + Navigator.of(context).pop(); + }, + ), + ); } void _hideContextMenu() { @@ -450,7 +562,7 @@ class _PinepodsFeedState extends State { try { // Convert PinepodsEpisode to Episode for local download final localEpisode = Episode( - guid: 'pinepods_${episode.episodeId}_${DateTime.now().millisecondsSinceEpoch}', + guid: _generateEpisodeGuid(episode), pguid: 'pinepods_${episode.podcastName.replaceAll(' ', '_').toLowerCase()}', podcast: episode.podcastName, title: episode.episodeTitle, @@ -467,34 +579,128 @@ class _PinepodsFeedState extends State { chapters: [], transcriptUrls: [], ); - - print('DEBUG: Created local episode with GUID: ${localEpisode.guid}'); - print('DEBUG: Episode title: ${localEpisode.title}'); - print('DEBUG: Episode URL: ${localEpisode.contentUrl}'); + final logger = AppLogger(); + logger.debug('Feed', 'Created local episode with GUID: ${localEpisode.guid}'); + logger.debug('Feed', 'Episode title: ${localEpisode.title}'); + logger.debug('Feed', 'Episode URL: ${localEpisode.contentUrl}'); final podcastBloc = Provider.of(context, listen: false); // First save the episode to the repository so it can be tracked await podcastBloc.podcastService.saveEpisode(localEpisode); - print('DEBUG: Episode saved to repository'); + logger.debug('Feed', 'Episode saved to repository'); // Use the download service from podcast bloc final success = await podcastBloc.downloadService.downloadEpisode(localEpisode); - 
print('DEBUG: Download service result: $success'); + logger.debug('Feed', 'Download service result: $success'); if (success) { + _updateLocalDownloadStatus(episode, true); _showSnackBar('Episode download started', Colors.green); } else { _showSnackBar('Failed to start download', Colors.red); } } catch (e) { - print('DEBUG: Error in local download: $e'); + final logger = AppLogger(); + logger.error('Feed', 'Error in local download for episode: ${episode.episodeTitle}', e.toString()); _showSnackBar('Error starting local download: $e', Colors.red); } _hideContextMenu(); } + Future _deleteLocalDownload(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final logger = AppLogger(); + + try { + final podcastBloc = Provider.of(context, listen: false); + final guid = _generateEpisodeGuid(episode); + + // Get all episodes and find matches with both new and old GUID formats + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + logger.debug('Feed', 'Found ${matchingEpisodes.length} episodes to delete for $guid'); + + if (matchingEpisodes.isNotEmpty) { + // Delete ALL matching episodes (handles duplicates from old timestamp GUIDs) + for (final localEpisode in matchingEpisodes) { + logger.debug('Feed', 'Deleting episode: ${localEpisode.guid}'); + await podcastBloc.podcastService.repository.deleteEpisode(localEpisode); + } + + // Update cache + _updateLocalDownloadStatus(episode, false); + + final deletedCount = matchingEpisodes.length; + _showSnackBar('Deleted $deletedCount local download${deletedCount > 1 ? 
's' : ''}', Colors.orange); + } else { + _showSnackBar('Local download not found', Colors.red); + } + } catch (e) { + logger.error('Feed', 'Error deleting local download for episode: ${episode.episodeTitle}', e.toString()); + _showSnackBar('Error deleting local download: $e', Colors.red); + } + + _hideContextMenu(); + } + + // Generate consistent GUID for PinePods episodes for local downloads + String _generateEpisodeGuid(PinepodsEpisode episode) { + return 'pinepods_${episode.episodeId}'; + } + + // Check if episode is downloaded locally + Future _isEpisodeDownloadedLocally(PinepodsEpisode episode) async { + final guid = _generateEpisodeGuid(episode); + final logger = AppLogger(); + logger.debug('Feed', 'Checking download status for episode: ${episode.episodeTitle}, GUID: $guid'); + + // Check cache first + if (_localDownloadStatus.containsKey(guid)) { + logger.debug('Feed', 'Found cached status for $guid: ${_localDownloadStatus[guid]}'); + return _localDownloadStatus[guid]!; + } + + try { + final podcastBloc = Provider.of(context, listen: false); + + // Get all episodes and find matches with both new and old GUID formats + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + logger.debug('Feed', 'Repository lookup for $guid: found ${matchingEpisodes.length} matching episodes'); + + // Found matching episodes + + // Consider downloaded if ANY matching episode is downloaded + final isDownloaded = matchingEpisodes.any((ep) => + ep.downloaded || ep.downloadState == DownloadState.downloaded + ); + + logger.debug('Feed', 'Final download status for $guid: $isDownloaded'); + + // Cache the result + _localDownloadStatus[guid] = isDownloaded; + return isDownloaded; + } catch (e) { + final logger = AppLogger(); + logger.error('Feed', 'Error checking local download status for episode: ${episode.episodeTitle}', e.toString()); 
+ return false; + } + } + + // Update local download status cache + void _updateLocalDownloadStatus(PinepodsEpisode episode, bool isDownloaded) { + final guid = _generateEpisodeGuid(episode); + _localDownloadStatus[guid] = isDownloaded; + } + // Helper method to update episode properties efficiently PinepodsEpisode _updateEpisodeProperty( PinepodsEpisode episode, { @@ -533,62 +739,13 @@ class _PinepodsFeedState extends State { @override void dispose() { - _audioService?.dispose(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } @override Widget build(BuildContext context) { - // Show context menu as a modal overlay if needed - if (_contextMenuEpisodeIndex != null) { - final episodeIndex = _contextMenuEpisodeIndex!; // Store locally to avoid null issues - WidgetsBinding.instance.addPostFrameCallback((_) { - showDialog( - context: context, - barrierColor: Colors.black.withOpacity(0.3), - builder: (context) => EpisodeContextMenu( - episode: _episodes[episodeIndex], - onSave: () { - Navigator.of(context).pop(); - _saveEpisode(episodeIndex); - }, - onRemoveSaved: () { - Navigator.of(context).pop(); - _removeSavedEpisode(episodeIndex); - }, - onDownload: _episodes[episodeIndex].downloaded - ? 
() { - Navigator.of(context).pop(); - _deleteEpisode(episodeIndex); - } - : () { - Navigator.of(context).pop(); - _downloadEpisode(episodeIndex); - }, - onLocalDownload: () { - Navigator.of(context).pop(); - _localDownloadEpisode(episodeIndex); - }, - onQueue: () { - Navigator.of(context).pop(); - _toggleQueueEpisode(episodeIndex); - }, - onMarkComplete: () { - Navigator.of(context).pop(); - _toggleMarkComplete(episodeIndex); - }, - onDismiss: () { - Navigator.of(context).pop(); - _hideContextMenu(); - }, - ), - ); - }); - // Reset the context menu index after storing it locally - _contextMenuEpisodeIndex = null; - } - if (_isLoading) { return const SliverFillRemaining( child: Center( @@ -605,35 +762,15 @@ class _PinepodsFeedState extends State { } if (_errorMessage.isNotEmpty) { - return SliverFillRemaining( - child: Center( - child: Padding( - padding: const EdgeInsets.all(16.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - color: Theme.of(context).colorScheme.error, - size: 48, - ), - const SizedBox(height: 16), - Text( - _errorMessage, - style: TextStyle( - color: Theme.of(context).colorScheme.error, - ), - textAlign: TextAlign.center, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _refresh, - child: const Text('Retry'), - ), - ], - ), - ), - ), + return SliverServerErrorPage( + errorMessage: _errorMessage.isServerConnectionError + ? null + : _errorMessage, + onRetry: _refresh, + title: 'Feed Unavailable', + subtitle: _errorMessage.isServerConnectionError + ? 
'Unable to connect to the PinePods server' + : 'Failed to load recent episodes', ); } @@ -715,7 +852,6 @@ class _PinepodsFeedState extends State { }, onLongPress: () => _showContextMenu(episodeIndex), onPlayPressed: () => _playEpisode(_episodes[episodeIndex]), - onDownloadPressed: () => _downloadEpisode(episodeIndex), ); }, childCount: _episodes.length + 1, // +1 for header diff --git a/mobile/lib/ui/pinepods/history.dart b/mobile/lib/ui/pinepods/history.dart index e39a76cb..0a56a79a 100644 --- a/mobile/lib/ui/pinepods/history.dart +++ b/mobile/lib/ui/pinepods/history.dart @@ -8,6 +8,12 @@ import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; +import 'package:pinepods_mobile/ui/utils/local_download_utils.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/utils/position_utils.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:provider/provider.dart'; import 'package:sliver_tools/sliver_tools.dart'; @@ -24,7 +30,7 @@ class _PinepodsHistoryState extends State { List _episodes = []; List _filteredEpisodes = []; final PinepodsService _pinepodsService = PinepodsService(); - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance int? 
_contextMenuEpisodeIndex; final TextEditingController _searchController = TextEditingController(); String _searchQuery = ''; @@ -39,7 +45,7 @@ class _PinepodsHistoryState extends State { @override void dispose() { _searchController.dispose(); - _audioService?.dispose(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } @@ -61,22 +67,7 @@ class _PinepodsHistoryState extends State { } } - void _initializeAudioService() { - if (_audioService != null) return; - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - } - } + PinepodsAudioService? get _audioService => GlobalServices.pinepodsAudioService; Future _loadHistory() async { setState(() { @@ -99,12 +90,21 @@ class _PinepodsHistoryState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); final userId = settings.pinepodsUserId!; final episodes = await _pinepodsService.getUserHistory(userId); + // Enrich episodes with best available positions (local vs server) + final enrichedEpisodes = await PositionUtils.enrichEpisodesWithBestPositions( + context, + _pinepodsService, + episodes, + userId, + ); + setState(() { - _episodes = episodes; + _episodes = enrichedEpisodes; // Sort episodes by publication date (newest first) _episodes.sort((a, b) { try { @@ -118,6 +118,9 @@ class _PinepodsHistoryState extends State { _filterEpisodes(); // Initialize filtered list _isLoading = false; }); + + // After loading episodes, check their local download status + await LocalDownloadUtils.loadLocalDownloadStatuses(context, enrichedEpisodes); } catch (e) { setState(() { _errorMessage = 'Failed to load 
listening history: ${e.toString()}'; @@ -127,11 +130,12 @@ class _PinepodsHistoryState extends State { } Future _refresh() async { + // Clear local download status cache on refresh + LocalDownloadUtils.clearCache(); await _loadHistory(); } Future _playEpisode(PinepodsEpisode episode) async { - _initializeAudioService(); if (_audioService == null) { ScaffoldMessenger.of(context).showSnackBar( @@ -184,10 +188,84 @@ class _PinepodsHistoryState extends State { } } - void _showContextMenu(int episodeIndex) { - setState(() { - _contextMenuEpisodeIndex = episodeIndex; - }); + Future _showContextMenu(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final isDownloadedLocally = await LocalDownloadUtils.isEpisodeDownloadedLocally(context, episode); + + if (!mounted) return; + + showDialog( + context: context, + barrierColor: Colors.black.withOpacity(0.3), + builder: (context) => EpisodeContextMenu( + episode: episode, + isDownloadedLocally: isDownloadedLocally, + onSave: () { + Navigator.of(context).pop(); + _saveEpisode(episodeIndex); + }, + onRemoveSaved: () { + Navigator.of(context).pop(); + _removeSavedEpisode(episodeIndex); + }, + onDownload: episode.downloaded + ? 
() { + Navigator.of(context).pop(); + _deleteEpisode(episodeIndex); + } + : () { + Navigator.of(context).pop(); + _downloadEpisode(episodeIndex); + }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex); + }, + onDeleteLocalDownload: () { + Navigator.of(context).pop(); + _deleteLocalDownload(episodeIndex); + }, + onQueue: () { + Navigator.of(context).pop(); + _toggleQueueEpisode(episodeIndex); + }, + onMarkComplete: () { + Navigator.of(context).pop(); + _toggleMarkComplete(episodeIndex); + }, + onDismiss: () { + Navigator.of(context).pop(); + }, + ), + ); + } + + Future _localDownloadEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final success = await LocalDownloadUtils.localDownloadEpisode(context, episode); + + if (success) { + LocalDownloadUtils.showSnackBar(context, 'Episode download started', Colors.green); + } else { + LocalDownloadUtils.showSnackBar(context, 'Failed to start download', Colors.red); + } + } + + Future _deleteLocalDownload(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final deletedCount = await LocalDownloadUtils.deleteLocalDownload(context, episode); + + if (deletedCount > 0) { + LocalDownloadUtils.showSnackBar( + context, + 'Deleted $deletedCount local download${deletedCount > 1 ? 
's' : ''}', + Colors.orange + ); + } else { + LocalDownloadUtils.showSnackBar(context, 'Local download not found', Colors.red); + } } void _hideContextMenu() { @@ -219,6 +297,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(_episodes[episodeIndex], saved: true); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Episode saved!', Colors.green); } else { @@ -254,6 +333,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(_episodes[episodeIndex], saved: false); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Removed from saved episodes', Colors.orange); } else { @@ -289,6 +369,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, downloaded: true); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Episode download queued!', Colors.green); } else { @@ -324,6 +405,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, downloaded: false); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Episode deleted from server', Colors.orange); } else { @@ -360,6 +442,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, queued: false); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Removed from queue', Colors.orange); } @@ -372,6 +455,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, queued: true); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Added to queue!', Colors.green); } @@ -411,6 +495,7 @@ class 
_PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, completed: false); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Marked as incomplete', Colors.orange); } @@ -423,6 +508,7 @@ class _PinepodsHistoryState extends State { if (success) { setState(() { _episodes[episodeIndex] = _updateEpisodeProperty(episode, completed: true); + _filterEpisodes(); // Update filtered list to reflect changes }); _showSnackBar('Marked as complete!', Colors.green); } @@ -476,50 +562,6 @@ class _PinepodsHistoryState extends State { @override Widget build(BuildContext context) { - // Show context menu as a modal overlay if needed - if (_contextMenuEpisodeIndex != null) { - final episodeIndex = _contextMenuEpisodeIndex!; - WidgetsBinding.instance.addPostFrameCallback((_) { - showDialog( - context: context, - barrierColor: Colors.black.withOpacity(0.3), - builder: (context) => EpisodeContextMenu( - episode: _episodes[episodeIndex], - onSave: () { - Navigator.of(context).pop(); - _saveEpisode(episodeIndex); - }, - onRemoveSaved: () { - Navigator.of(context).pop(); - _removeSavedEpisode(episodeIndex); - }, - onDownload: _episodes[episodeIndex].downloaded - ? 
() { - Navigator.of(context).pop(); - _deleteEpisode(episodeIndex); - } - : () { - Navigator.of(context).pop(); - _downloadEpisode(episodeIndex); - }, - onQueue: () { - Navigator.of(context).pop(); - _toggleQueueEpisode(episodeIndex); - }, - onMarkComplete: () { - Navigator.of(context).pop(); - _toggleMarkComplete(episodeIndex); - }, - onDismiss: () { - Navigator.of(context).pop(); - _hideContextMenu(); - }, - ), - ); - }); - _contextMenuEpisodeIndex = null; - } - if (_isLoading) { return const SliverFillRemaining( child: Center( @@ -536,35 +578,15 @@ class _PinepodsHistoryState extends State { } if (_errorMessage.isNotEmpty) { - return SliverFillRemaining( - child: Center( - child: Padding( - padding: const EdgeInsets.all(16.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - color: Theme.of(context).colorScheme.error, - size: 48, - ), - const SizedBox(height: 16), - Text( - _errorMessage, - style: TextStyle( - color: Theme.of(context).colorScheme.error, - ), - textAlign: TextAlign.center, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _refresh, - child: const Text('Retry'), - ), - ], - ), - ), - ), + return SliverServerErrorPage( + errorMessage: _errorMessage.isServerConnectionError + ? null + : _errorMessage, + onRetry: _refresh, + title: 'History Unavailable', + subtitle: _errorMessage.isServerConnectionError + ? 'Unable to connect to the PinePods server' + : 'Failed to load listening history', ); } @@ -712,9 +734,8 @@ class _PinepodsHistoryState extends State { ), ); }, - onLongPress: () => _showContextMenu(originalIndex), + onLongPress: originalIndex >= 0 ? 
() => _showContextMenu(originalIndex) : null, onPlayPressed: () => _playEpisode(episode), - onDownloadPressed: () => _downloadEpisode(originalIndex), ); }, childCount: _filteredEpisodes.length + 1, // +1 for header diff --git a/mobile/lib/ui/pinepods/home.dart b/mobile/lib/ui/pinepods/home.dart index 332a55b2..492dcfbd 100644 --- a/mobile/lib/ui/pinepods/home.dart +++ b/mobile/lib/ui/pinepods/home.dart @@ -1,21 +1,28 @@ // lib/ui/pinepods/home.dart import 'package:flutter/material.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; +import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:pinepods_mobile/entities/home_data.dart'; import 'package:pinepods_mobile/entities/pinepods_episode.dart'; +import 'package:pinepods_mobile/entities/episode.dart'; import 'package:pinepods_mobile/ui/pinepods/feed.dart'; import 'package:pinepods_mobile/ui/pinepods/saved.dart'; import 'package:pinepods_mobile/ui/pinepods/downloads.dart'; import 'package:pinepods_mobile/ui/pinepods/queue.dart'; import 'package:pinepods_mobile/ui/pinepods/history.dart'; import 'package:pinepods_mobile/ui/pinepods/playlists.dart'; +import 'package:pinepods_mobile/ui/pinepods/playlist_episodes.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; import 'package:pinepods_mobile/ui/pinepods/podcast_details.dart'; import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; import 
'package:provider/provider.dart'; import 'package:intl/intl.dart'; @@ -33,8 +40,7 @@ class _PinepodsHomeState extends State { PlaylistResponse? _playlistData; final PinepodsService _pinepodsService = PinepodsService(); - // Audio service and context menu state - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance int? _contextMenuEpisodeIndex; bool _isContextMenuForContinueListening = false; @@ -68,6 +74,7 @@ class _PinepodsHomeState extends State { settings.pinepodsServer!, settings.pinepodsApiKey!, ); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); // Load home data and playlists in parallel final futures = await Future.wait([ @@ -76,8 +83,8 @@ class _PinepodsHomeState extends State { ]); setState(() { - _homeData = futures[0] as HomeOverview?; - _playlistData = futures[1] as PlaylistResponse?; + _homeData = futures[0] as HomeOverview; + _playlistData = futures[1] as PlaylistResponse; _isLoading = false; }); } catch (e) { @@ -88,27 +95,9 @@ class _PinepodsHomeState extends State { } } - void _initializeAudioService() { - if (_audioService != null) return; // Already initialized - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - } - } + PinepodsAudioService? 
get _audioService => GlobalServices.pinepodsAudioService; Future _playEpisode(HomeEpisode homeEpisode) async { - // Try to initialize audio service if not already done - _initializeAudioService(); - if (_audioService == null) { _showSnackBar('Audio service not available', Colors.red); return; @@ -133,7 +122,11 @@ class _PinepodsHomeState extends State { ); try { - await _audioService!.playPinepodsEpisode(pinepodsEpisode: episode); + await playPinepodsEpisodeWithOptionalFullScreen( + context, + _audioService!, + episode, + ); } catch (e) { if (mounted) { _showSnackBar('Failed to play episode: $e', Colors.red); @@ -171,6 +164,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { final success = await _pinepodsService.saveEpisode( @@ -213,6 +207,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { final success = await _pinepodsService.removeSavedEpisode( @@ -254,6 +249,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { final success = await _pinepodsService.downloadEpisode( @@ -279,6 +275,53 @@ class _PinepodsHomeState extends State { } } + Future _localDownloadEpisode(int episodeIndex, bool isContinueListening) async { + final episodes = isContinueListening + ? 
_homeData!.inProgressEpisodes + : _homeData!.recentEpisodes; + final homeEpisode = episodes[episodeIndex]; + + try { + // Convert HomeEpisode to Episode for local download + final localEpisode = Episode( + guid: 'pinepods_${homeEpisode.episodeId}_${DateTime.now().millisecondsSinceEpoch}', + pguid: 'pinepods_${homeEpisode.podcastName.replaceAll(' ', '_').toLowerCase()}', + podcast: homeEpisode.podcastName, + title: homeEpisode.episodeTitle, + description: homeEpisode.episodeDescription, + imageUrl: homeEpisode.episodeArtwork, + contentUrl: homeEpisode.episodeUrl, + duration: homeEpisode.episodeDuration, + publicationDate: DateTime.tryParse(homeEpisode.episodePubDate), + author: homeEpisode.podcastName, + season: 0, + episode: 0, + position: homeEpisode.listenDuration ?? 0, + played: homeEpisode.completed, + chapters: [], + transcriptUrls: [], + ); + + final podcastBloc = Provider.of(context, listen: false); + + // First save the episode to the repository so it can be tracked + await podcastBloc.podcastService.saveEpisode(localEpisode); + + // Use the download service from podcast bloc + final success = await podcastBloc.downloadService.downloadEpisode(localEpisode); + + if (success) { + _showSnackBar('Episode download started', Colors.green); + } else { + _showSnackBar('Failed to start download', Colors.red); + } + } catch (e) { + _showSnackBar('Error starting local download: $e', Colors.red); + } + + _hideContextMenu(); + } + Future _deleteEpisode(int episodeIndex, bool isContinueListening) async { final episodes = isContinueListening ? 
_homeData!.inProgressEpisodes @@ -295,6 +338,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { final success = await _pinepodsService.deleteEpisode( @@ -336,6 +380,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { bool success; @@ -397,6 +442,7 @@ class _PinepodsHomeState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); try { bool success; @@ -529,6 +575,10 @@ class _PinepodsHomeState extends State { Navigator.of(context).pop(); _downloadEpisode(episodeIndex, _isContextMenuForContinueListening); }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex, _isContextMenuForContinueListening); + }, onQueue: () { Navigator.of(context).pop(); _toggleQueueEpisode(episodeIndex, _isContextMenuForContinueListening); @@ -565,36 +615,15 @@ class _PinepodsHomeState extends State { ), ) else if (_errorMessage.isNotEmpty) - Padding( - padding: const EdgeInsets.all(16.0), - child: Card( - color: Theme.of(context).colorScheme.errorContainer, - child: Padding( - padding: const EdgeInsets.all(16.0), - child: Column( - children: [ - Icon( - Icons.error_outline, - color: Theme.of(context).colorScheme.onErrorContainer, - size: 48, - ), - const SizedBox(height: 16), - Text( - _errorMessage, - style: TextStyle( - color: Theme.of(context).colorScheme.onErrorContainer, - ), - textAlign: TextAlign.center, - ), - const SizedBox(height: 16), - FilledButton( - onPressed: _loadHomeContent, - child: const Text('Retry'), - ), - ], - ), - ), - ), + ServerErrorPage( + errorMessage: 
_errorMessage.isServerConnectionError + ? null + : _errorMessage, + onRetry: _loadHomeContent, + title: 'Home Unavailable', + subtitle: _errorMessage.isServerConnectionError + ? 'Unable to connect to the PinePods server' + : 'Failed to load home content', ) else if (_homeData != null) Padding( @@ -950,6 +979,9 @@ class _StatCard extends StatelessWidget { @override Widget build(BuildContext context) { return Card( + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(16), + ), child: Padding( padding: const EdgeInsets.all(16.0), child: Column( @@ -963,11 +995,6 @@ class _StatCard extends StatelessWidget { color: color, ), ), - Text( - title, - style: Theme.of(context).textTheme.bodySmall, - textAlign: TextAlign.center, - ), ], ), ), @@ -1233,13 +1260,12 @@ class _PlaylistCard extends StatelessWidget { return SizedBox( width: 200, child: Card( + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(16), + ), child: InkWell( - onTap: () { - ScaffoldMessenger.of(context).showSnackBar( - SnackBar(content: Text('Opening playlist: ${playlist.name}')), - ); - }, - borderRadius: BorderRadius.circular(12), + onTap: () => _openPlaylist(context), + borderRadius: BorderRadius.circular(16), child: Padding( padding: const EdgeInsets.all(16.0), child: Column( @@ -1283,6 +1309,57 @@ class _PlaylistCard extends StatelessWidget { ); } + Future _openPlaylist(BuildContext context) async { + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + if (settings.pinepodsServer == null || + settings.pinepodsApiKey == null || + settings.pinepodsUserId == null) { + if (context.mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('Not connected to PinePods server. 
Please connect in Settings.'), + backgroundColor: Colors.red, + ), + ); + } + return; + } + + try { + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials( + settings.pinepodsServer!, + settings.pinepodsApiKey!, + ); + + final userPlaylists = await pinepodsService.getUserPlaylists(settings.pinepodsUserId!); + final fullPlaylistData = userPlaylists.firstWhere( + (p) => p.playlistId == playlist.playlistId, + orElse: () => throw Exception('Playlist not found'), + ); + + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => PlaylistEpisodesPage(playlist: fullPlaylistData), + ), + ); + } + } catch (e) { + if (context.mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Error opening playlist: $e'), + backgroundColor: Colors.red, + ), + ); + } + } + } + IconData _getIconFromName(String iconName) { switch (iconName) { case 'ph-music-notes': diff --git a/mobile/lib/ui/pinepods/playlist_episodes.dart b/mobile/lib/ui/pinepods/playlist_episodes.dart index ac1f369b..1d131b9b 100644 --- a/mobile/lib/ui/pinepods/playlist_episodes.dart +++ b/mobile/lib/ui/pinepods/playlist_episodes.dart @@ -4,6 +4,7 @@ import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; @@ -29,8 +30,7 @@ class _PlaylistEpisodesPageState extends State { bool _isLoading = true; String? _errorMessage; - // Audio service and context menu state - PinepodsAudioService? 
_audioService; + // Use global audio service instead of creating local instance int? _contextMenuEpisodeIndex; @override @@ -63,6 +63,7 @@ class _PlaylistEpisodesPageState extends State { settings.pinepodsServer!, settings.pinepodsApiKey!, ); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); final response = await _pinepodsService.getPlaylistEpisodes( settings.pinepodsUserId!, @@ -132,27 +133,9 @@ class _PlaylistEpisodesPageState extends State { } } - void _initializeAudioService() { - if (_audioService != null) return; // Already initialized - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - } - } + PinepodsAudioService? get _audioService => GlobalServices.pinepodsAudioService; Future _playEpisode(PinepodsEpisode episode) async { - // Try to initialize audio service if not already done - _initializeAudioService(); - if (_audioService == null) { _showSnackBar('Audio service not available', Colors.red); return; diff --git a/mobile/lib/ui/pinepods/playlists.dart b/mobile/lib/ui/pinepods/playlists.dart index e4ec65b0..681845aa 100644 --- a/mobile/lib/ui/pinepods/playlists.dart +++ b/mobile/lib/ui/pinepods/playlists.dart @@ -3,6 +3,8 @@ import 'package:flutter/material.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; import 'package:pinepods_mobile/ui/pinepods/playlist_episodes.dart'; import 'package:pinepods_mobile/ui/pinepods/create_playlist.dart'; import 
'package:provider/provider.dart'; @@ -28,6 +30,25 @@ class _PinepodsPlaylistsState extends State { _loadPlaylists(); } + /// Calculate responsive cross axis count for playlist grid + int _getPlaylistCrossAxisCount(BuildContext context) { + final screenWidth = MediaQuery.of(context).size.width; + if (screenWidth > 1200) return 4; // Very wide screens (large tablets, desktop) + if (screenWidth > 800) return 3; // Wide tablets like iPad + if (screenWidth > 500) return 2; // Standard phones and small tablets + return 1; // Very small phones (< 500px) + } + + /// Calculate responsive aspect ratio for playlist cards + double _getPlaylistAspectRatio(BuildContext context) { + final screenWidth = MediaQuery.of(context).size.width; + if (screenWidth <= 500) { + // Single column on small screens - generous height for multi-line descriptions + padding + return 1.8; // Allows space for title + 2-3 lines of description + proper padding + } + return 1.1; // Standard aspect ratio for multi-column layouts + } + Future _loadPlaylists() async { final settingsBloc = Provider.of(context, listen: false); final settings = settingsBloc.currentSettings; @@ -256,39 +277,15 @@ class _PinepodsPlaylistsState extends State { } if (_errorMessage != null) { - return SliverFillRemaining( - hasScrollBody: false, - child: Padding( - padding: const EdgeInsets.all(32.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - crossAxisAlignment: CrossAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - size: 75, - color: Theme.of(context).colorScheme.error, - ), - const SizedBox(height: 16), - Text( - 'Error loading playlists', - style: Theme.of(context).textTheme.titleLarge, - textAlign: TextAlign.center, - ), - const SizedBox(height: 8), - Text( - _errorMessage!, - style: Theme.of(context).textTheme.bodyMedium, - textAlign: TextAlign.center, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _loadPlaylists, - child: const Text('Retry'), - ), - ], - ), - ), + 
return SliverServerErrorPage( + errorMessage: _errorMessage!.isServerConnectionError + ? null + : _errorMessage, + onRetry: _loadPlaylists, + title: 'Playlists Unavailable', + subtitle: _errorMessage!.isServerConnectionError + ? 'Unable to connect to the PinePods server' + : 'Failed to load your playlists', ); } @@ -413,11 +410,11 @@ class _PinepodsPlaylistsState extends State { GridView.builder( shrinkWrap: true, physics: const NeverScrollableScrollPhysics(), - gridDelegate: const SliverGridDelegateWithFixedCrossAxisCount( - crossAxisCount: 2, + gridDelegate: SliverGridDelegateWithFixedCrossAxisCount( + crossAxisCount: _getPlaylistCrossAxisCount(context), crossAxisSpacing: 12, mainAxisSpacing: 12, - childAspectRatio: 1.1, + childAspectRatio: _getPlaylistAspectRatio(context), ), itemCount: _playlists!.length, itemBuilder: (context, index) { @@ -435,6 +432,9 @@ class _PinepodsPlaylistsState extends State { }, child: Card( elevation: isSelected ? 8 : 2, + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(12), + ), color: isSelected ? 
Theme.of(context).primaryColor.withOpacity(0.1) : null, diff --git a/mobile/lib/ui/pinepods/podcast_details.dart b/mobile/lib/ui/pinepods/podcast_details.dart index 13e77529..bbc908cd 100644 --- a/mobile/lib/ui/pinepods/podcast_details.dart +++ b/mobile/lib/ui/pinepods/podcast_details.dart @@ -9,6 +9,7 @@ import 'package:pinepods_mobile/entities/episode.dart'; import 'package:pinepods_mobile/entities/person.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/podcast/podcast_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; @@ -17,6 +18,8 @@ import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; import 'package:pinepods_mobile/ui/podcast/mini_player.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/utils/local_download_utils.dart'; import 'package:provider/provider.dart'; import 'package:sliver_tools/sliver_tools.dart'; @@ -40,11 +43,12 @@ class _PinepodsPodcastDetailsState extends State { final PinepodsService _pinepodsService = PinepodsService(); bool _isLoading = false; bool _isFollowing = false; + bool _isFollowButtonLoading = false; String? _errorMessage; List _episodes = []; List _filteredEpisodes = []; int? _contextMenuEpisodeIndex; - PinepodsAudioService? 
_audioService; + // Use global audio service instead of creating local instance final TextEditingController _searchController = TextEditingController(); String _searchQuery = ''; List _hosts = []; @@ -61,7 +65,7 @@ class _PinepodsPodcastDetailsState extends State { @override void dispose() { _searchController.dispose(); - _audioService?.dispose(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } @@ -91,6 +95,7 @@ class _PinepodsPodcastDetailsState extends State { settings.pinepodsServer!, settings.pinepodsApiKey!, ); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); } } @@ -194,7 +199,30 @@ class _PinepodsPodcastDetailsState extends State { if (podcastId != null && podcastId > 0) { // Get episodes from server episodes = await _pinepodsService.getPodcastEpisodes(userId, podcastId); - print('Loaded ${episodes.length} episodes'); + print('Loaded ${episodes.length} episodes from server for podcastId: $podcastId'); + + // If server has no episodes, this podcast may need episode sync + if (episodes.isEmpty) { + print('Server has no episodes for subscribed podcast. This should not happen.'); + print('Podcast ID: $podcastId, Title: ${widget.podcast.title}'); + + // For subscribed podcasts, we should NOT fall back to RSS + // The server should have episodes. This indicates a server-side sync issue. 
+ // Fall back to RSS ONLY as emergency backup, but episodes won't be clickable + try { + final podcastService = Provider.of(context, listen: false); + final rssPodcast = Podcast.fromUrl(url: widget.podcast.url); + + final loadedPodcast = await podcastService.loadPodcast(podcast: rssPodcast); + + if (loadedPodcast != null && loadedPodcast.episodes.isNotEmpty) { + episodes = loadedPodcast.episodes.map(_convertEpisodeToPinepodsEpisode).toList(); + print('Emergency RSS fallback: Loaded ${episodes.length} episodes (NOT CLICKABLE)'); + } + } catch (e) { + print('Emergency RSS fallback also failed: $e'); + } + } // Fetch podcast 2.0 data for hosts information try { @@ -272,11 +300,19 @@ class _PinepodsPodcastDetailsState extends State { } Future _toggleFollow() async { + print('PinePods Follow button: CLICKED - Setting loading to true'); + setState(() { + _isFollowButtonLoading = true; + }); + final settingsBloc = Provider.of(context, listen: false); final settings = settingsBloc.currentSettings; final userId = settings.pinepodsUserId; if (userId == null) { + setState(() { + _isFollowButtonLoading = false; + }); _showSnackBar('Not logged in to PinePods server', Colors.red); return; } @@ -322,6 +358,12 @@ class _PinepodsPodcastDetailsState extends State { } } catch (e) { _showSnackBar('Error: $e', Colors.red); + } finally { + // Always reset loading state + setState(() { + _isFollowButtonLoading = false; + }); + print('PinePods Follow button: Loading state reset to false'); } } @@ -336,10 +378,57 @@ class _PinepodsPodcastDetailsState extends State { } - void _showEpisodeContextMenu(int episodeIndex) { - setState(() { - _contextMenuEpisodeIndex = episodeIndex; - }); + Future _showEpisodeContextMenu(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final isDownloadedLocally = await LocalDownloadUtils.isEpisodeDownloadedLocally(context, episode); + + if (!mounted) return; + + showDialog( + context: context, + barrierColor: Colors.black.withOpacity(0.3), 
+ builder: (context) => EpisodeContextMenu( + episode: episode, + isDownloadedLocally: isDownloadedLocally, + onSave: () { + Navigator.of(context).pop(); + _saveEpisode(episodeIndex); + }, + onRemoveSaved: () { + Navigator.of(context).pop(); + _removeSavedEpisode(episodeIndex); + }, + onDownload: episode.downloaded + ? () { + Navigator.of(context).pop(); + _deleteEpisode(episodeIndex); + } + : () { + Navigator.of(context).pop(); + _downloadEpisode(episodeIndex); + }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex); + }, + onDeleteLocalDownload: () { + Navigator.of(context).pop(); + _deleteLocalDownload(episodeIndex); + }, + onQueue: () { + Navigator.of(context).pop(); + _queueEpisode(episodeIndex); + }, + onMarkComplete: () { + Navigator.of(context).pop(); + _markEpisodeComplete(episodeIndex); + }, + onDismiss: () { + Navigator.of(context).pop(); + _hideEpisodeContextMenu(); + }, + ), + ); } void _hideEpisodeContextMenu() { @@ -348,34 +437,19 @@ class _PinepodsPodcastDetailsState extends State { }); } - void _initializeAudioService() { - if (_audioService != null) return; - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - print('Error initializing audio service: $e'); - } - } + PinepodsAudioService? 
get _audioService => GlobalServices.pinepodsAudioService; Future _playEpisode(PinepodsEpisode episode) async { - _initializeAudioService(); - if (_audioService == null) { _showSnackBar('Audio service not available', Colors.red); return; } try { - await _audioService!.playPinepodsEpisode( - pinepodsEpisode: episode, + await playPinepodsEpisodeWithOptionalFullScreen( + context, + _audioService!, + episode, resume: episode.isStarted, ); } catch (e) { @@ -383,27 +457,312 @@ class _PinepodsPodcastDetailsState extends State { } } - @override - Widget build(BuildContext context) { - // Show context menu if needed - if (_contextMenuEpisodeIndex != null) { - final episodeIndex = _contextMenuEpisodeIndex!; - WidgetsBinding.instance.addPostFrameCallback((_) { - showDialog( - context: context, - barrierColor: Colors.black.withOpacity(0.3), - builder: (context) => EpisodeContextMenu( - episode: _episodes[episodeIndex], - onDismiss: () { - Navigator.of(context).pop(); - _hideEpisodeContextMenu(); - }, - ), + Future _saveEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final success = await _pinepodsService.saveEpisode( + episode.episodeId, + userId, + episode.isYoutube, + ); + + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, saved: true); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Episode saved!', Colors.green); + } else { + _showSnackBar('Failed to save episode', Colors.red); + } + } catch (e) { + _showSnackBar('Error saving episode: $e', Colors.red); + } + + 
_hideEpisodeContextMenu(); + } + + Future _removeSavedEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final success = await _pinepodsService.removeSavedEpisode( + episode.episodeId, + userId, + episode.isYoutube, + ); + + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, saved: false); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Removed from saved episodes', Colors.orange); + } else { + _showSnackBar('Failed to remove saved episode', Colors.red); + } + } catch (e) { + _showSnackBar('Error removing saved episode: $e', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + Future _downloadEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final success = await _pinepodsService.downloadEpisode( + episode.episodeId, + userId, + episode.isYoutube, + ); + + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, downloaded: true); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Episode download started!', Colors.green); + } else { + _showSnackBar('Failed 
to download episode', Colors.red); + } + } catch (e) { + _showSnackBar('Error downloading episode: $e', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + Future _deleteEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final success = await _pinepodsService.deleteEpisode( + episode.episodeId, + userId, + episode.isYoutube, + ); + + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, downloaded: false); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Episode deleted from server', Colors.orange); + } else { + _showSnackBar('Failed to delete episode', Colors.red); + } + } catch (e) { + _showSnackBar('Error deleting episode: $e', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + Future _queueEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + bool success; + if (episode.queued) { + success = await _pinepodsService.removeQueuedEpisode( + episode.episodeId, + userId, + episode.isYoutube, ); - }); - _contextMenuEpisodeIndex = null; + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, queued: false); + _filteredEpisodes = _episodes.where((e) => + 
e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Removed from queue', Colors.orange); + } + } else { + success = await _pinepodsService.queueEpisode( + episode.episodeId, + userId, + episode.isYoutube, + ); + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, queued: true); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Added to queue!', Colors.green); + } + } + + if (!success) { + _showSnackBar('Failed to update queue', Colors.red); + } + } catch (e) { + _showSnackBar('Error updating queue: $e', Colors.red); } + _hideEpisodeContextMenu(); + } + + Future _markEpisodeComplete(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + final userId = settings.pinepodsUserId; + + if (userId == null) { + _showSnackBar('Not logged in', Colors.red); + return; + } + + _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final success = await _pinepodsService.markEpisodeCompleted( + episode.episodeId, + userId, + episode.isYoutube, + ); + + if (success) { + setState(() { + _episodes[episodeIndex] = _updateEpisodeProperty(episode, completed: true); + _filteredEpisodes = _episodes.where((e) => + e.episodeTitle.toLowerCase().contains(_searchController.text.toLowerCase()) + ).toList(); + }); + _showSnackBar('Episode marked as complete', Colors.green); + } else { + _showSnackBar('Failed to mark episode complete', Colors.red); + } + } catch (e) { + _showSnackBar('Error marking episode complete: $e', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + Future _localDownloadEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final success = await 
LocalDownloadUtils.localDownloadEpisode(context, episode); + + if (success) { + _showSnackBar('Episode download started', Colors.green); + } else { + _showSnackBar('Failed to start download', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + Future _deleteLocalDownload(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final deletedCount = await LocalDownloadUtils.deleteLocalDownload(context, episode); + + if (deletedCount > 0) { + _showSnackBar( + 'Deleted $deletedCount local download${deletedCount > 1 ? 's' : ''}', + Colors.orange + ); + } else { + _showSnackBar('Local download not found', Colors.red); + } + + _hideEpisodeContextMenu(); + } + + PinepodsEpisode _updateEpisodeProperty( + PinepodsEpisode episode, { + bool? saved, + bool? downloaded, + bool? queued, + bool? completed, + }) { + return PinepodsEpisode( + podcastName: episode.podcastName, + episodeTitle: episode.episodeTitle, + episodePubDate: episode.episodePubDate, + episodeDescription: episode.episodeDescription, + episodeArtwork: episode.episodeArtwork, + episodeUrl: episode.episodeUrl, + episodeDuration: episode.episodeDuration, + listenDuration: episode.listenDuration, + episodeId: episode.episodeId, + completed: completed ?? episode.completed, + saved: saved ?? episode.saved, + queued: queued ?? episode.queued, + downloaded: downloaded ?? episode.downloaded, + isYoutube: episode.isYoutube, + podcastId: episode.podcastId, + ); + } + + @override + Widget build(BuildContext context) { return Scaffold( body: Column( children: [ @@ -469,11 +828,20 @@ class _PinepodsPodcastDetailsState extends State { ), actions: [ IconButton( - onPressed: _toggleFollow, - icon: Icon( - _isFollowing ? Icons.favorite : Icons.favorite_border, - color: _isFollowing ? Colors.red : Colors.white, - ), + onPressed: _isFollowButtonLoading ? null : _toggleFollow, + icon: _isFollowButtonLoading + ? 
const SizedBox( + width: 24, + height: 24, + child: CircularProgressIndicator( + strokeWidth: 2.0, + valueColor: AlwaysStoppedAnimation(Colors.white), + ), + ) + : Icon( + _isFollowing ? Icons.favorite : Icons.favorite_border, + color: _isFollowing ? Colors.red : Colors.white, + ), tooltip: _isFollowing ? 'Unfollow' : 'Follow', ), ], @@ -505,11 +873,20 @@ class _PinepodsPodcastDetailsState extends State { ), ), ElevatedButton.icon( - onPressed: _toggleFollow, - icon: Icon( - _isFollowing ? Icons.remove : Icons.add, - size: 16, - ), + onPressed: _isFollowButtonLoading ? null : _toggleFollow, + icon: _isFollowButtonLoading + ? const SizedBox( + width: 16, + height: 16, + child: CircularProgressIndicator( + strokeWidth: 2.0, + valueColor: AlwaysStoppedAnimation(Colors.white), + ), + ) + : Icon( + _isFollowing ? Icons.remove : Icons.add, + size: 16, + ), label: Text(_isFollowing ? 'Unfollow' : 'Follow'), style: ElevatedButton.styleFrom( backgroundColor: _isFollowing ? Colors.red : Colors.green, @@ -816,10 +1193,16 @@ class _PinepodsPodcastDetailsState extends State { final episode = _filteredEpisodes[index]; // Find the original index for context menu operations final originalIndex = _episodes.indexOf(episode); + final bool hasValidServerEpisodeId = episode.episodeId > 0; + + if (!hasValidServerEpisodeId) { + print('Episode "${episode.episodeTitle}" has no server ID (RSS fallback) - disabling episode details navigation'); + } + return PinepodsEpisodeCard( episode: episode, - onTap: _isFollowing ? () { - // Navigate to episode details only if following + onTap: _isFollowing && hasValidServerEpisodeId ? () { + // Navigate to episode details only if following AND has valid server episode ID Navigator.push( context, MaterialPageRoute( @@ -828,13 +1211,13 @@ class _PinepodsPodcastDetailsState extends State { ), ), ); - } : null, // Disable tap if not following - onLongPress: _isFollowing ? 
() { + } : null, // Disable tap if not following or no valid episode ID + onLongPress: _isFollowing && hasValidServerEpisodeId ? () { _showEpisodeContextMenu(originalIndex); - } : null, // Disable long press if not following + } : null, // Disable long press if not following or no valid episode ID onPlayPressed: _isFollowing ? () { _playEpisode(episode); - } : null, // Disable play if not following + } : null, // Allow play for RSS episodes since it uses direct URL ); }, childCount: _filteredEpisodes.length, diff --git a/mobile/lib/ui/pinepods/podcasts.dart b/mobile/lib/ui/pinepods/podcasts.dart index 6f1cb4f1..b11d733b 100644 --- a/mobile/lib/ui/pinepods/podcasts.dart +++ b/mobile/lib/ui/pinepods/podcasts.dart @@ -12,6 +12,8 @@ import 'package:pinepods_mobile/ui/widgets/pinepods_podcast_grid_tile.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_podcast_tile.dart'; import 'package:pinepods_mobile/ui/widgets/layout_selector.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; import 'package:flutter/material.dart'; import 'package:provider/provider.dart'; import 'package:sliver_tools/sliver_tools.dart'; @@ -266,39 +268,15 @@ class _PinepodsPodcastsState extends State { } if (_errorMessage != null) { - return SliverFillRemaining( - hasScrollBody: false, - child: Padding( - padding: const EdgeInsets.all(32.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - crossAxisAlignment: CrossAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - size: 75, - color: Theme.of(context).colorScheme.error, - ), - const SizedBox(height: 16), - Text( - 'Error loading podcasts', - style: Theme.of(context).textTheme.titleLarge, - textAlign: TextAlign.center, - ), - const SizedBox(height: 8), - Text( - _errorMessage!, - style: Theme.of(context).textTheme.bodyMedium, - textAlign: 
TextAlign.center, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _loadPodcasts, - child: const Text('Retry'), - ), - ], - ), - ), + return SliverServerErrorPage( + errorMessage: _errorMessage!.isServerConnectionError + ? null + : _errorMessage, + onRetry: _loadPodcasts, + title: 'Podcasts Unavailable', + subtitle: _errorMessage!.isServerConnectionError + ? 'Unable to connect to the PinePods server' + : 'Failed to load your podcasts', ); } diff --git a/mobile/lib/ui/pinepods/queue.dart b/mobile/lib/ui/pinepods/queue.dart index 4117d306..c6f24458 100644 --- a/mobile/lib/ui/pinepods/queue.dart +++ b/mobile/lib/ui/pinepods/queue.dart @@ -3,12 +3,13 @@ import 'package:flutter/material.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; -import 'package:pinepods_mobile/services/audio/audio_player_service.dart'; import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; -import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/widgets/draggable_queue_episode_card.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; +import 'package:pinepods_mobile/ui/utils/local_download_utils.dart'; +import 'package:pinepods_mobile/ui/utils/position_utils.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:provider/provider.dart'; class PinepodsQueue extends StatefulWidget { @@ -23,8 +24,12 @@ class _PinepodsQueueState extends State { String _errorMessage = ''; List _episodes = []; final PinepodsService _pinepodsService = PinepodsService(); - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance int? 
_contextMenuEpisodeIndex; + + // Auto-scroll related variables + bool _isDragging = false; + bool _isAutoScrolling = false; @override void initState() { @@ -32,22 +37,7 @@ class _PinepodsQueueState extends State { _loadQueuedEpisodes(); } - void _initializeAudioService() { - if (_audioService != null) return; - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - } - } + PinepodsAudioService? get _audioService => GlobalServices.pinepodsAudioService; Future _loadQueuedEpisodes() async { setState(() { @@ -70,14 +60,26 @@ class _PinepodsQueueState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); final userId = settings.pinepodsUserId!; final episodes = await _pinepodsService.getQueuedEpisodes(userId); + // Enrich episodes with best available positions (local vs server) + final enrichedEpisodes = await PositionUtils.enrichEpisodesWithBestPositions( + context, + _pinepodsService, + episodes, + userId, + ); + setState(() { - _episodes = episodes; + _episodes = enrichedEpisodes; _isLoading = false; }); + + // After loading episodes, check their local download status + await LocalDownloadUtils.loadLocalDownloadStatuses(context, enrichedEpisodes); } catch (e) { setState(() { _errorMessage = 'Failed to load queued episodes: ${e.toString()}'; @@ -87,6 +89,8 @@ class _PinepodsQueueState extends State { } Future _refresh() async { + // Clear local download status cache on refresh + LocalDownloadUtils.clearCache(); await _loadQueuedEpisodes(); } @@ -134,7 +138,6 @@ class _PinepodsQueueState extends State { } Future _playEpisode(PinepodsEpisode episode) async { - 
_initializeAudioService(); if (_audioService == null) { ScaffoldMessenger.of(context).showSnackBar( @@ -187,10 +190,84 @@ class _PinepodsQueueState extends State { } } - void _showContextMenu(int episodeIndex) { - setState(() { - _contextMenuEpisodeIndex = episodeIndex; - }); + Future _showContextMenu(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final isDownloadedLocally = await LocalDownloadUtils.isEpisodeDownloadedLocally(context, episode); + + if (!mounted) return; + + showDialog( + context: context, + barrierColor: Colors.black.withValues(alpha: 0.3), + builder: (context) => EpisodeContextMenu( + episode: episode, + isDownloadedLocally: isDownloadedLocally, + onSave: () { + Navigator.of(context).pop(); + _saveEpisode(episodeIndex); + }, + onRemoveSaved: () { + Navigator.of(context).pop(); + _removeSavedEpisode(episodeIndex); + }, + onDownload: episode.downloaded + ? () { + Navigator.of(context).pop(); + _deleteEpisode(episodeIndex); + } + : () { + Navigator.of(context).pop(); + _downloadEpisode(episodeIndex); + }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex); + }, + onDeleteLocalDownload: () { + Navigator.of(context).pop(); + _deleteLocalDownload(episodeIndex); + }, + onQueue: () { + Navigator.of(context).pop(); + _toggleQueueEpisode(episodeIndex); + }, + onMarkComplete: () { + Navigator.of(context).pop(); + _toggleMarkComplete(episodeIndex); + }, + onDismiss: () { + Navigator.of(context).pop(); + }, + ), + ); + } + + Future _localDownloadEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final success = await LocalDownloadUtils.localDownloadEpisode(context, episode); + + if (success) { + LocalDownloadUtils.showSnackBar(context, 'Episode download started', Colors.green); + } else { + LocalDownloadUtils.showSnackBar(context, 'Failed to start download', Colors.red); + } + } + + Future _deleteLocalDownload(int episodeIndex) async { + final episode = 
_episodes[episodeIndex]; + + final deletedCount = await LocalDownloadUtils.deleteLocalDownload(context, episode); + + if (deletedCount > 0) { + LocalDownloadUtils.showSnackBar( + context, + 'Deleted $deletedCount local download${deletedCount > 1 ? 's' : ''}', + Colors.orange + ); + } else { + LocalDownloadUtils.showSnackBar(context, 'Local download not found', Colors.red); + } } void _hideContextMenu() { @@ -479,58 +556,76 @@ class _PinepodsQueueState extends State { ); } + void _startAutoScroll(bool scrollUp) async { + if (_isAutoScrolling) return; + _isAutoScrolling = true; + + while (_isDragging && _isAutoScrolling) { + // Find the nearest ScrollView controller + final ScrollController? scrollController = Scrollable.maybeOf(context)?.widget.controller; + + if (scrollController != null && scrollController.hasClients) { + final currentOffset = scrollController.offset; + final maxScrollExtent = scrollController.position.maxScrollExtent; + + if (scrollUp && currentOffset > 0) { + // Scroll up + final newOffset = (currentOffset - 8.0).clamp(0.0, maxScrollExtent); + scrollController.jumpTo(newOffset); + } else if (!scrollUp && currentOffset < maxScrollExtent) { + // Scroll down + final newOffset = (currentOffset + 8.0).clamp(0.0, maxScrollExtent); + scrollController.jumpTo(newOffset); + } else { + break; // Reached the edge + } + } + + await Future.delayed(const Duration(milliseconds: 16)); + } + + _isAutoScrolling = false; + } + + void _stopAutoScroll() { + _isAutoScrolling = false; + } + + void _checkAutoScroll(double globalY) { + if (!_isDragging) return; + + final MediaQueryData mediaQuery = MediaQuery.of(context); + final double screenHeight = mediaQuery.size.height; + final double topPadding = mediaQuery.padding.top; + final double bottomPadding = mediaQuery.padding.bottom; + + const double autoScrollThreshold = 80.0; + + if (globalY < topPadding + autoScrollThreshold) { + // Near top, scroll up + if (!_isAutoScrolling) { + _startAutoScroll(true); + } + } else 
if (globalY > screenHeight - bottomPadding - autoScrollThreshold) { + // Near bottom, scroll down + if (!_isAutoScrolling) { + _startAutoScroll(false); + } + } else { + // In the middle, stop auto-scrolling + _stopAutoScroll(); + } + } + @override void dispose() { - _audioService?.dispose(); + _stopAutoScroll(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } @override Widget build(BuildContext context) { - // Show context menu as a modal overlay if needed - if (_contextMenuEpisodeIndex != null) { - final episodeIndex = _contextMenuEpisodeIndex!; - WidgetsBinding.instance.addPostFrameCallback((_) { - showDialog( - context: context, - barrierColor: Colors.black.withOpacity(0.3), - builder: (context) => EpisodeContextMenu( - episode: _episodes[episodeIndex], - onSave: () { - Navigator.of(context).pop(); - _saveEpisode(episodeIndex); - }, - onRemoveSaved: () { - Navigator.of(context).pop(); - _removeSavedEpisode(episodeIndex); - }, - onDownload: _episodes[episodeIndex].downloaded - ? 
() { - Navigator.of(context).pop(); - _deleteEpisode(episodeIndex); - } - : () { - Navigator.of(context).pop(); - _downloadEpisode(episodeIndex); - }, - onQueue: () { - Navigator.of(context).pop(); - _toggleQueueEpisode(episodeIndex); - }, - onMarkComplete: () { - Navigator.of(context).pop(); - _toggleMarkComplete(episodeIndex); - }, - onDismiss: () { - Navigator.of(context).pop(); - _hideContextMenu(); - }, - ), - ); - }); - _contextMenuEpisodeIndex = null; - } - if (_isLoading) { return const SliverFillRemaining( child: Center( @@ -616,11 +711,11 @@ class _PinepodsQueueState extends State { } Widget _buildEpisodesList() { - return SliverToBoxAdapter( - child: Column( - children: [ - // Header - Padding( + return SliverMainAxisGroup( + slivers: [ + // Header + SliverToBoxAdapter( + child: Padding( padding: const EdgeInsets.all(16.0), child: Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, @@ -651,39 +746,60 @@ class _PinepodsQueueState extends State { ], ), ), - // Reorderable episodes list - ReorderableListView.builder( - shrinkWrap: true, - physics: const NeverScrollableScrollPhysics(), - buildDefaultDragHandles: false, // Disable automatic drag handles - onReorder: _reorderEpisodes, - itemCount: _episodes.length, - itemBuilder: (context, index) { - final episode = _episodes[index]; - return Container( - key: ValueKey(episode.episodeId), - margin: const EdgeInsets.only(bottom: 4), - child: DraggableQueueEpisodeCard( - episode: episode, - index: index, - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - builder: (context) => PinepodsEpisodeDetails( - initialEpisode: episode, - ), - ), - ); - }, - onLongPress: () => _showContextMenu(index), - onPlayPressed: () => _playEpisode(episode), - ), - ); + ), + // Auto-scrolling reorderable episodes list wrapped with pointer detection + SliverToBoxAdapter( + child: Listener( + onPointerMove: (details) { + if (_isDragging) { + _checkAutoScroll(details.position.dy); + } }, + child: 
ReorderableListView.builder( + shrinkWrap: true, + physics: const NeverScrollableScrollPhysics(), + buildDefaultDragHandles: false, + onReorderStart: (index) { + setState(() { + _isDragging = true; + }); + }, + onReorderEnd: (index) { + setState(() { + _isDragging = false; + }); + _stopAutoScroll(); + }, + onReorder: _reorderEpisodes, + itemCount: _episodes.length, + itemBuilder: (context, index) { + final episode = _episodes[index]; + return Container( + key: ValueKey(episode.episodeId), + margin: const EdgeInsets.only(bottom: 4), + child: DraggableQueueEpisodeCard( + episode: episode, + index: index, + onTap: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => PinepodsEpisodeDetails( + initialEpisode: episode, + ), + ), + ); + }, + onLongPress: () => _showContextMenu(index), + onPlayPressed: () => _playEpisode(episode), + ), + ); + }, + ), ), - ], - ), + ), + ], ); } -} \ No newline at end of file +} + diff --git a/mobile/lib/ui/pinepods/saved.dart b/mobile/lib/ui/pinepods/saved.dart index 0406cb7c..24f16e3b 100644 --- a/mobile/lib/ui/pinepods/saved.dart +++ b/mobile/lib/ui/pinepods/saved.dart @@ -8,6 +8,12 @@ import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/ui/widgets/episode_context_menu.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/pinepods/episode_details.dart'; +import 'package:pinepods_mobile/ui/utils/local_download_utils.dart'; +import 'package:pinepods_mobile/ui/utils/player_utils.dart'; +import 'package:pinepods_mobile/ui/utils/position_utils.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; import 'package:provider/provider.dart'; import 'package:sliver_tools/sliver_tools.dart'; @@ -24,7 +30,7 @@ class _PinepodsSavedState extends State { List _episodes 
= []; List _filteredEpisodes = []; final PinepodsService _pinepodsService = PinepodsService(); - PinepodsAudioService? _audioService; + // Use global audio service instead of creating local instance int? _contextMenuEpisodeIndex; final TextEditingController _searchController = TextEditingController(); String _searchQuery = ''; @@ -39,7 +45,7 @@ class _PinepodsSavedState extends State { @override void dispose() { _searchController.dispose(); - _audioService?.dispose(); + // Don't dispose global audio service - it should persist across pages super.dispose(); } @@ -61,22 +67,7 @@ class _PinepodsSavedState extends State { } } - void _initializeAudioService() { - if (_audioService != null) return; - - try { - final audioPlayerService = Provider.of(context, listen: false); - final settingsBloc = Provider.of(context, listen: false); - - _audioService = PinepodsAudioService( - audioPlayerService, - _pinepodsService, - settingsBloc, - ); - } catch (e) { - // Provider not available - audio service will remain null - } - } + PinepodsAudioService? 
get _audioService => GlobalServices.pinepodsAudioService; Future _loadSavedEpisodes() async { setState(() { @@ -99,15 +90,27 @@ class _PinepodsSavedState extends State { } _pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + GlobalServices.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); final userId = settings.pinepodsUserId!; final episodes = await _pinepodsService.getSavedEpisodes(userId); + // Enrich episodes with best available positions (local vs server) + final enrichedEpisodes = await PositionUtils.enrichEpisodesWithBestPositions( + context, + _pinepodsService, + episodes, + userId, + ); + setState(() { - _episodes = episodes; + _episodes = enrichedEpisodes; _filterEpisodes(); // Initialize filtered list _isLoading = false; }); + + // After loading episodes, check their local download status + await LocalDownloadUtils.loadLocalDownloadStatuses(context, enrichedEpisodes); } catch (e) { setState(() { _errorMessage = 'Failed to load saved episodes: ${e.toString()}'; @@ -117,11 +120,12 @@ class _PinepodsSavedState extends State { } Future _refresh() async { + // Clear local download status cache on refresh + LocalDownloadUtils.clearCache(); await _loadSavedEpisodes(); } Future _playEpisode(PinepodsEpisode episode) async { - _initializeAudioService(); if (_audioService == null) { ScaffoldMessenger.of(context).showSnackBar( @@ -174,10 +178,84 @@ class _PinepodsSavedState extends State { } } - void _showContextMenu(int episodeIndex) { - setState(() { - _contextMenuEpisodeIndex = episodeIndex; - }); + Future _showContextMenu(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + final isDownloadedLocally = await LocalDownloadUtils.isEpisodeDownloadedLocally(context, episode); + + if (!mounted) return; + + showDialog( + context: context, + barrierColor: Colors.black.withOpacity(0.3), + builder: (context) => EpisodeContextMenu( + episode: episode, + isDownloadedLocally: isDownloadedLocally, + 
onSave: () { + Navigator.of(context).pop(); + _saveEpisode(episodeIndex); + }, + onRemoveSaved: () { + Navigator.of(context).pop(); + _removeSavedEpisode(episodeIndex); + }, + onDownload: episode.downloaded + ? () { + Navigator.of(context).pop(); + _deleteEpisode(episodeIndex); + } + : () { + Navigator.of(context).pop(); + _downloadEpisode(episodeIndex); + }, + onLocalDownload: () { + Navigator.of(context).pop(); + _localDownloadEpisode(episodeIndex); + }, + onDeleteLocalDownload: () { + Navigator.of(context).pop(); + _deleteLocalDownload(episodeIndex); + }, + onQueue: () { + Navigator.of(context).pop(); + _toggleQueueEpisode(episodeIndex); + }, + onMarkComplete: () { + Navigator.of(context).pop(); + _toggleMarkComplete(episodeIndex); + }, + onDismiss: () { + Navigator.of(context).pop(); + }, + ), + ); + } + + Future _localDownloadEpisode(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final success = await LocalDownloadUtils.localDownloadEpisode(context, episode); + + if (success) { + LocalDownloadUtils.showSnackBar(context, 'Episode download started', Colors.green); + } else { + LocalDownloadUtils.showSnackBar(context, 'Failed to start download', Colors.red); + } + } + + Future _deleteLocalDownload(int episodeIndex) async { + final episode = _episodes[episodeIndex]; + + final deletedCount = await LocalDownloadUtils.deleteLocalDownload(context, episode); + + if (deletedCount > 0) { + LocalDownloadUtils.showSnackBar( + context, + 'Deleted $deletedCount local download${deletedCount > 1 ? 
's' : ''}', + Colors.orange + ); + } else { + LocalDownloadUtils.showSnackBar(context, 'Local download not found', Colors.red); + } } void _hideContextMenu() { @@ -247,6 +325,7 @@ class _PinepodsSavedState extends State { // REMOVE the episode from the list since it's no longer saved setState(() { _episodes.removeAt(episodeIndex); + _filterEpisodes(); // Update filtered list after removal }); _showSnackBar('Removed from saved episodes', Colors.orange); } else { @@ -468,50 +547,6 @@ class _PinepodsSavedState extends State { @override Widget build(BuildContext context) { - // Show context menu as a modal overlay if needed - if (_contextMenuEpisodeIndex != null) { - final episodeIndex = _contextMenuEpisodeIndex!; - WidgetsBinding.instance.addPostFrameCallback((_) { - showDialog( - context: context, - barrierColor: Colors.black.withOpacity(0.3), - builder: (context) => EpisodeContextMenu( - episode: _episodes[episodeIndex], - onSave: () { - Navigator.of(context).pop(); - _saveEpisode(episodeIndex); - }, - onRemoveSaved: () { - Navigator.of(context).pop(); - _removeSavedEpisode(episodeIndex); - }, - onDownload: _episodes[episodeIndex].downloaded - ? 
() { - Navigator.of(context).pop(); - _deleteEpisode(episodeIndex); - } - : () { - Navigator.of(context).pop(); - _downloadEpisode(episodeIndex); - }, - onQueue: () { - Navigator.of(context).pop(); - _toggleQueueEpisode(episodeIndex); - }, - onMarkComplete: () { - Navigator.of(context).pop(); - _toggleMarkComplete(episodeIndex); - }, - onDismiss: () { - Navigator.of(context).pop(); - _hideContextMenu(); - }, - ), - ); - }); - _contextMenuEpisodeIndex = null; - } - if (_isLoading) { return const SliverFillRemaining( child: Center( @@ -528,35 +563,15 @@ class _PinepodsSavedState extends State { } if (_errorMessage.isNotEmpty) { - return SliverFillRemaining( - child: Center( - child: Padding( - padding: const EdgeInsets.all(16.0), - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - color: Theme.of(context).colorScheme.error, - size: 48, - ), - const SizedBox(height: 16), - Text( - _errorMessage, - style: TextStyle( - color: Theme.of(context).colorScheme.error, - ), - textAlign: TextAlign.center, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: _refresh, - child: const Text('Retry'), - ), - ], - ), - ), - ), + return SliverServerErrorPage( + errorMessage: _errorMessage.isServerConnectionError + ? null + : _errorMessage, + onRetry: _refresh, + title: 'Saved Episodes Unavailable', + subtitle: _errorMessage.isServerConnectionError + ? 
'Unable to connect to the PinePods server' + : 'Failed to load saved episodes', ); } @@ -706,7 +721,6 @@ class _PinepodsSavedState extends State { }, onLongPress: () => _showContextMenu(originalIndex), onPlayPressed: () => _playEpisode(episode), - onDownloadPressed: () => _downloadEpisode(originalIndex), ); }, childCount: _filteredEpisodes.length + 1, // +1 for header diff --git a/mobile/lib/ui/pinepods/search.dart b/mobile/lib/ui/pinepods/search.dart index a047ca49..d252f1b3 100644 --- a/mobile/lib/ui/pinepods/search.dart +++ b/mobile/lib/ui/pinepods/search.dart @@ -6,8 +6,11 @@ import 'package:flutter/services.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; +import 'package:pinepods_mobile/services/search_history_service.dart'; import 'package:pinepods_mobile/ui/pinepods/podcast_details.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; +import 'package:pinepods_mobile/ui/widgets/server_error_page.dart'; +import 'package:pinepods_mobile/services/error_handling_service.dart'; import 'package:provider/provider.dart'; class PinepodsSearch extends StatefulWidget { @@ -26,11 +29,14 @@ class _PinepodsSearchState extends State { late TextEditingController _searchController; late FocusNode _searchFocusNode; final PinepodsService _pinepodsService = PinepodsService(); + final SearchHistoryService _searchHistoryService = SearchHistoryService(); SearchProvider _selectedProvider = SearchProvider.podcastIndex; bool _isLoading = false; + bool _showHistory = false; String? 
_errorMessage; List _searchResults = []; + List _searchHistory = []; Set _addedPodcastUrls = {}; @override @@ -43,9 +49,12 @@ class _PinepodsSearchState extends State { if (widget.searchTerm != null) { _searchController.text = widget.searchTerm!; _performSearch(widget.searchTerm!); + } else { + _loadSearchHistory(); } _initializeCredentials(); + _searchController.addListener(_onSearchChanged); } void _initializeCredentials() { @@ -67,11 +76,39 @@ class _PinepodsSearchState extends State { super.dispose(); } + Future _loadSearchHistory() async { + final history = await _searchHistoryService.getPodcastSearchHistory(); + if (mounted) { + setState(() { + _searchHistory = history; + _showHistory = _searchController.text.isEmpty && history.isNotEmpty; + }); + } + } + + void _onSearchChanged() { + final query = _searchController.text.trim(); + setState(() { + _showHistory = query.isEmpty && _searchHistory.isNotEmpty; + }); + } + + void _selectHistoryItem(String searchTerm) { + _searchController.text = searchTerm; + _performSearch(searchTerm); + } + + Future _removeHistoryItem(String searchTerm) async { + await _searchHistoryService.removePodcastSearchTerm(searchTerm); + await _loadSearchHistory(); + } + Future _performSearch(String query) async { if (query.trim().isEmpty) { setState(() { _searchResults = []; _errorMessage = null; + _showHistory = _searchHistory.isNotEmpty; }); return; } @@ -79,8 +116,13 @@ class _PinepodsSearchState extends State { setState(() { _isLoading = true; _errorMessage = null; + _showHistory = false; }); + // Save search term to history + await _searchHistoryService.addPodcastSearchTerm(query); + await _loadSearchHistory(); + try { final result = await _pinepodsService.searchPodcasts(query, _selectedProvider); final podcasts = result.getUnifiedPodcasts(); @@ -181,6 +223,103 @@ class _PinepodsSearchState extends State { ); } + Widget _buildSearchHistorySliver() { + return SliverFillRemaining( + hasScrollBody: false, + child: Container( + padding: 
const EdgeInsets.all(16), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Text( + 'Recent Podcast Searches', + style: Theme.of(context).textTheme.titleMedium?.copyWith( + color: Theme.of(context).primaryColor, + fontWeight: FontWeight.bold, + ), + ), + const Spacer(), + if (_searchHistory.isNotEmpty) + TextButton( + onPressed: () async { + await _searchHistoryService.clearPodcastSearchHistory(); + await _loadSearchHistory(); + }, + child: Text( + 'Clear All', + style: TextStyle( + color: Theme.of(context).hintColor, + fontSize: 12, + ), + ), + ), + ], + ), + const SizedBox(height: 16), + if (_searchHistory.isEmpty) + Center( + child: Column( + children: [ + const SizedBox(height: 50), + Icon( + Icons.search, + size: 64, + color: Theme.of(context).primaryColor.withOpacity(0.5), + ), + const SizedBox(height: 16), + Text( + 'Search for Podcasts', + style: Theme.of(context).textTheme.headlineSmall?.copyWith( + color: Theme.of(context).primaryColor, + fontWeight: FontWeight.bold, + ), + ), + const SizedBox(height: 8), + Text( + 'Enter a search term above to find new podcasts to subscribe to', + style: Theme.of(context).textTheme.bodyMedium?.copyWith( + color: Theme.of(context).hintColor, + ), + textAlign: TextAlign.center, + ), + ], + ), + ) + else + ..._searchHistory.take(10).map((searchTerm) => Card( + margin: const EdgeInsets.symmetric(vertical: 2), + child: ListTile( + dense: true, + leading: Icon( + Icons.history, + color: Theme.of(context).hintColor, + size: 20, + ), + title: Text( + searchTerm, + style: Theme.of(context).textTheme.bodyMedium, + maxLines: 1, + overflow: TextOverflow.ellipsis, + ), + trailing: IconButton( + icon: Icon( + Icons.close, + size: 18, + color: Theme.of(context).hintColor, + ), + onPressed: () => _removeHistoryItem(searchTerm), + ), + onTap: () => _selectHistoryItem(searchTerm), + ), + )).toList(), + ], + ), + ), + ); + } + Widget _buildPodcastCard(UnifiedPinepodsPodcast podcast) { 
final isAdded = _addedPodcastUrls.contains(podcast.url); @@ -370,6 +509,11 @@ class _PinepodsSearchState extends State { autofocus: widget.searchTerm != null ? false : true, keyboardType: TextInputType.text, textInputAction: TextInputAction.search, + onTap: () { + setState(() { + _showHistory = _searchController.text.isEmpty && _searchHistory.isNotEmpty; + }); + }, decoration: const InputDecoration( hintText: 'Search for podcasts', border: InputBorder.none, @@ -393,6 +537,7 @@ class _PinepodsSearchState extends State { setState(() { _searchResults = []; _errorMessage = null; + _showHistory = _searchHistory.isNotEmpty; }); FocusScope.of(context).requestFocus(_searchFocusNode); SystemChannels.textInput.invokeMethod('TextInput.show'); @@ -441,38 +586,24 @@ class _PinepodsSearchState extends State { ), ), - // Search results - if (_isLoading) + // Search results or history + if (_showHistory) + _buildSearchHistorySliver() + else if (_isLoading) const SliverFillRemaining( hasScrollBody: false, child: Center(child: PlatformProgressIndicator()), ) else if (_errorMessage != null) - SliverFillRemaining( - hasScrollBody: false, - child: Center( - child: Column( - mainAxisAlignment: MainAxisAlignment.center, - children: [ - Icon( - Icons.error_outline, - size: 64, - color: Colors.red[300], - ), - const SizedBox(height: 16), - Text( - _errorMessage!, - textAlign: TextAlign.center, - style: Theme.of(context).textTheme.bodyLarge, - ), - const SizedBox(height: 16), - ElevatedButton( - onPressed: () => _performSearch(_searchController.text), - child: const Text('Retry'), - ), - ], - ), - ), + SliverServerErrorPage( + errorMessage: _errorMessage!.isServerConnectionError + ? null + : _errorMessage, + onRetry: () => _performSearch(_searchController.text), + title: 'Search Unavailable', + subtitle: _errorMessage!.isServerConnectionError + ? 
'Unable to connect to the PinePods server' + : 'Failed to search for podcasts', ) else if (_searchResults.isEmpty && _searchController.text.isNotEmpty) SliverFillRemaining( diff --git a/mobile/lib/ui/pinepods/user_stats.dart b/mobile/lib/ui/pinepods/user_stats.dart index 68c455aa..d9fa33ac 100644 --- a/mobile/lib/ui/pinepods/user_stats.dart +++ b/mobile/lib/ui/pinepods/user_stats.dart @@ -4,6 +4,7 @@ import 'package:flutter/material.dart'; import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/entities/user_stats.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; import 'package:pinepods_mobile/core/environment.dart'; import 'package:provider/provider.dart'; @@ -42,6 +43,25 @@ class _PinepodsUserStatsState extends State { } } + /// Calculate responsive cross axis count for stats grid + int _getStatsCrossAxisCount(BuildContext context) { + final screenWidth = MediaQuery.of(context).size.width; + if (screenWidth > 1200) return 4; // Very wide screens (large tablets, desktop) + if (screenWidth > 800) return 3; // Wide tablets like iPad + if (screenWidth > 500) return 2; // Standard phones and small tablets + return 1; // Very small phones (< 500px) + } + + /// Calculate responsive aspect ratio for stats cards + double _getStatsAspectRatio(BuildContext context) { + final screenWidth = MediaQuery.of(context).size.width; + if (screenWidth <= 500) { + // Single column on small screens - generous height for content + proper padding + return 2.2; // Allows space for icon + title + value + padding, handles text wrapping + } + return 1.0; // Square aspect ratio for multi-column layouts + } + Future _loadUserStats() async { final settingsBloc = Provider.of(context, listen: false); final settings = settingsBloc.currentSettings; @@ -75,13 +95,43 @@ class 
_PinepodsUserStatsState extends State { } Future _launchUrl(String url) async { - final uri = Uri.parse(url); - if (await canLaunchUrl(uri)) { - await launchUrl(uri, mode: LaunchMode.externalApplication); + final logger = AppLogger(); + logger.info('UserStats', 'Attempting to launch URL: $url'); + + try { + final uri = Uri.parse(url); + + // Try to launch directly first (works better on Android) + final launched = await launchUrl( + uri, + mode: LaunchMode.externalApplication, + ); + + if (!launched) { + logger.warning('UserStats', 'Direct URL launch failed, checking if URL can be launched'); + // If direct launch fails, check if URL can be launched + final canLaunch = await canLaunchUrl(uri); + if (!canLaunch) { + throw Exception('No app available to handle this URL'); + } + } else { + logger.info('UserStats', 'Successfully launched URL: $url'); + } + } catch (e) { + logger.error('UserStats', 'Failed to launch URL: $url', e.toString()); + // Show error if URL can't be launched + if (mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Could not open link: $url'), + backgroundColor: Colors.red, + ), + ); + } } } - Widget _buildStatCard(String label, String value, {IconData? icon}) { + Widget _buildStatCard(String label, String value, {IconData? icon, Color? iconColor}) { return Card( elevation: 2, shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(12)), @@ -94,7 +144,7 @@ class _PinepodsUserStatsState extends State { Icon( icon, size: 32, - color: Theme.of(context).primaryColor, + color: iconColor ?? 
Theme.of(context).primaryColor, ), const SizedBox(height: 8), ], @@ -122,6 +172,21 @@ class _PinepodsUserStatsState extends State { ); } + /// Build sync status card that fits in the grid with consistent styling + Widget _buildSyncStatCard() { + if (_userStats == null) return const SizedBox.shrink(); + + final stats = _userStats!; + final isNotSyncing = stats.podSyncType.toLowerCase() == 'none'; + + return _buildStatCard( + 'Sync Status', + stats.syncStatusDescription, + icon: isNotSyncing ? Icons.sync_disabled : Icons.sync, + iconColor: isNotSyncing ? Colors.grey : null, + ); + } + Widget _buildSyncStatusCard() { if (_userStats == null) return const SizedBox.shrink(); @@ -384,10 +449,10 @@ class _PinepodsUserStatsState extends State { children: [ // Statistics Grid GridView.count( - crossAxisCount: 2, + crossAxisCount: _getStatsCrossAxisCount(context), shrinkWrap: true, physics: const NeverScrollableScrollPhysics(), - childAspectRatio: 1.0, + childAspectRatio: _getStatsAspectRatio(context), crossAxisSpacing: 12, mainAxisSpacing: 12, children: [ @@ -421,16 +486,13 @@ class _PinepodsUserStatsState extends State { _userStats?.episodesDownloaded.toString() ?? 
'', icon: Icons.download, ), + // Add sync status as a stat card to maintain consistent layout + _buildSyncStatCard(), ], ), const SizedBox(height: 16), - // Sync Status Card - _buildSyncStatusCard(), - - const SizedBox(height: 16), - // Info Card _buildInfoCard(), ], diff --git a/mobile/lib/ui/pinepods_podcast_app.dart b/mobile/lib/ui/pinepods_podcast_app.dart index 5db95bf2..677fdd6c 100644 --- a/mobile/lib/ui/pinepods_podcast_app.dart +++ b/mobile/lib/ui/pinepods_podcast_app.dart @@ -30,6 +30,9 @@ import 'package:pinepods_mobile/services/podcast/mobile_podcast_service.dart'; import 'package:pinepods_mobile/services/podcast/podcast_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; +import 'package:pinepods_mobile/services/pinepods/oidc_service.dart'; +import 'package:pinepods_mobile/services/pinepods/login_service.dart'; +import 'package:pinepods_mobile/services/auth_notifier.dart'; import 'package:pinepods_mobile/services/settings/mobile_settings_service.dart'; import 'package:pinepods_mobile/ui/library/downloads.dart'; import 'package:pinepods_mobile/ui/library/library.dart'; @@ -68,6 +71,7 @@ import 'package:flutter_localizations/flutter_localizations.dart'; import 'package:logging/logging.dart'; import 'package:provider/provider.dart'; import 'package:url_launcher/url_launcher.dart'; +import 'package:pinepods_mobile/services/global_services.dart'; var theme = Themes.lightTheme().themeData; @@ -83,6 +87,8 @@ class PinepodsPodcastApp extends StatefulWidget { SettingsBloc? 
settingsBloc; MobileSettingsService mobileSettingsService; List certificateAuthorityBytes; + late PinepodsAudioService pinepodsAudioService; + late PinepodsService pinepodsService; PinepodsPodcastApp({ super.key, @@ -114,16 +120,23 @@ class PinepodsPodcastApp extends StatefulWidget { settingsBloc = SettingsBloc(mobileSettingsService); // Create and connect PinepodsAudioService for listen duration tracking - final pinepodsService = PinepodsService(); - final pinepodsAudioService = PinepodsAudioService( + pinepodsService = PinepodsService(); + pinepodsAudioService = PinepodsAudioService( audioPlayerService!, pinepodsService, settingsBloc!, ); // Connect the services for listen duration recording - (audioPlayerService as DefaultAudioPlayerService).setPinepodsAudioService(pinepodsAudioService); + (audioPlayerService as DefaultAudioPlayerService).setPinepodsAudioService( + pinepodsAudioService, + ); + // Initialize global services for app-wide access + GlobalServices.initialize( + pinepodsAudioService: pinepodsAudioService, + pinepodsService: pinepodsService, + ); podcastApi.addClientAuthorityBytes(certificateAuthorityBytes); } @@ -160,22 +173,23 @@ class PinepodsPodcastAppState extends State { return MultiProvider( providers: [ Provider( - create: (_) => SearchBloc( - podcastService: widget.podcastService!, - ), + create: (_) => SearchBloc(podcastService: widget.podcastService!), dispose: (_, value) => value.dispose(), ), Provider( - create: (_) => - EpisodeBloc(podcastService: widget.podcastService!, audioPlayerService: widget.audioPlayerService), + create: (_) => EpisodeBloc( + podcastService: widget.podcastService!, + audioPlayerService: widget.audioPlayerService, + ), dispose: (_, value) => value.dispose(), ), Provider( create: (_) => PodcastBloc( - podcastService: widget.podcastService!, - audioPlayerService: widget.audioPlayerService, - downloadService: widget.downloadService, - settingsService: widget.mobileSettingsService), + podcastService: 
widget.podcastService!, + audioPlayerService: widget.audioPlayerService, + downloadService: widget.downloadService, + settingsService: widget.mobileSettingsService, + ), dispose: (_, value) => value.dispose(), ), Provider( @@ -183,7 +197,8 @@ class PinepodsPodcastAppState extends State { dispose: (_, value) => value.dispose(), ), Provider( - create: (_) => AudioBloc(audioPlayerService: widget.audioPlayerService), + create: (_) => + AudioBloc(audioPlayerService: widget.audioPlayerService), dispose: (_, value) => value.dispose(), ), Provider( @@ -197,12 +212,8 @@ class PinepodsPodcastAppState extends State { ), dispose: (_, value) => value.dispose(), ), - Provider( - create: (_) => widget.audioPlayerService, - ), - Provider( - create: (_) => widget.podcastService!, - ) + Provider(create: (_) => widget.audioPlayerService), + Provider(create: (_) => widget.podcastService!), ], child: MaterialApp( debugShowCheckedModeBanner: false, @@ -210,7 +221,7 @@ class PinepodsPodcastAppState extends State { title: 'Pinepods Podcast Client', navigatorObservers: [NavigationRouteObserver()], localizationsDelegates: const >[ - AnytimeLocalisationsDelegate(), + PinepodsLocalisationsDelegate(), GlobalMaterialLocalizations.delegate, GlobalWidgetsLocalizations.delegate, GlobalCupertinoLocalizations.delegate, @@ -224,31 +235,28 @@ class PinepodsPodcastAppState extends State { // Uncomment builder below to enable accessibility checker tool. // builder: (context, child) => AccessibilityTools(child: child), home: const AuthWrapper( - child: AnytimeHomePage(title: 'PinePods Podcast Player'), + child: PinepodsHomePage(title: 'PinePods Podcast Player'), ), ), ); } } -class AnytimeHomePage extends StatefulWidget { +class PinepodsHomePage extends StatefulWidget { final String? 
title; final bool topBarVisible; - const AnytimeHomePage({ - super.key, - this.title, - this.topBarVisible = true, - }); + const PinepodsHomePage({super.key, this.title, this.topBarVisible = true}); @override - State createState() => _AnytimeHomePageState(); + State createState() => _PinepodsHomePageState(); } -class _AnytimeHomePageState extends State with WidgetsBindingObserver { +class _PinepodsHomePageState extends State + with WidgetsBindingObserver { StreamSubscription? deepLinkSubscription; - final log = Logger('_AnytimeHomePageState'); + final log = Logger('_PinepodsHomePageState'); bool handledInitialLink = false; Widget? library; @@ -267,21 +275,60 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb } /// We listen to external links from outside the app. For example, someone may navigate - /// to a web page that supports 'Open with Anytime'. + /// to a web page that supports 'Open with Pinepods'. void _setupLinkListener() async { + print('Deep Link: Setting up link listener...'); final appLinks = AppLinks(); // AppLinks is singleton - // Subscribe to all events (initial link and further) + // Handle initial link if app was launched by one (cold start) + try { + final initialUri = await appLinks.getInitialLink(); + if (initialUri != null) { + print('Deep Link: App launched with initial link: $initialUri'); + _handleLinkEvent(initialUri); + } else { + print('Deep Link: No initial link found'); + } + } catch (e) { + print('Deep Link: Error getting initial link: $e'); + } + + // Subscribe to all events (further links while app is running) + print('Deep Link: Setting up stream listener...'); deepLinkSubscription = appLinks.uriLinkStream.listen((uri) { - // Do something (navigation, ...) 
+ print('Deep Link: App received link while running: $uri'); _handleLinkEvent(uri); + }, onError: (err) { + print('Deep Link: Stream error: $err'); }); + + print('Deep Link: Link listener setup complete'); } /// This method handles the actual link supplied from [uni_links], either /// at app startup or during running. void _handleLinkEvent(Uri uri) async { - if ((uri.scheme == 'anytime-subscribe' || uri.scheme == 'https') && + print('Deep Link: Received link: $uri'); + print('Deep Link: Scheme: ${uri.scheme}, Host: ${uri.host}, Path: ${uri.path}'); + print('Deep Link: Query: ${uri.query}'); + print('Deep Link: QueryParameters: ${uri.queryParameters}'); + + // Handle OIDC authentication callback - be more flexible with path matching + if (uri.scheme == 'pinepods' && uri.host == 'auth') { + print('Deep Link: OIDC callback detected (flexible match)'); + await _handleOidcCallback(uri); + return; + } + + // Handle OIDC authentication callback - strict match + if (uri.scheme == 'pinepods' && uri.host == 'auth' && uri.path == '/callback') { + print('Deep Link: OIDC callback detected (strict match)'); + await _handleOidcCallback(uri); + return; + } + + // Handle podcast subscription links + if ((uri.scheme == 'pinepods-subscribe' || uri.scheme == 'https') && (uri.query.startsWith('uri=') || uri.query.startsWith('url='))) { var path = uri.query.substring(4); var loadPodcastBloc = Provider.of(context, listen: false); @@ -291,26 +338,169 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb /// the BLoC) that we load this new URL. If not, we pop the stack until we are /// back at root and then load the podcast details page. if (routeName != null && routeName == 'podcastdetails') { - loadPodcastBloc.load(Feed( - podcast: Podcast.fromUrl(url: path), - backgroundFresh: false, - silently: false, - )); + loadPodcastBloc.load( + Feed( + podcast: Podcast.fromUrl(url: path), + backgroundFresh: false, + silently: false, + ), + ); } else { /// Pop back to route. 
Navigator.of(context).popUntil((route) { var currentRouteName = NavigationRouteObserver().top!.settings.name; - return currentRouteName == null || currentRouteName == '' || currentRouteName == '/'; + return currentRouteName == null || + currentRouteName == '' || + currentRouteName == '/'; }); /// Once we have reached the root route, push podcast details. await Navigator.push( context, MaterialPageRoute( - fullscreenDialog: true, - settings: const RouteSettings(name: 'podcastdetails'), - builder: (context) => PodcastDetails(Podcast.fromUrl(url: path), loadPodcastBloc)), + fullscreenDialog: true, + settings: const RouteSettings(name: 'podcastdetails'), + builder: (context) => + PodcastDetails(Podcast.fromUrl(url: path), loadPodcastBloc), + ), + ); + } + } + } + + /// Handle OIDC authentication callback + Future _handleOidcCallback(Uri uri) async { + try { + print('OIDC Callback: Received callback URL: $uri'); + + // Parse the callback result + final callbackResult = OidcService.parseCallback(uri.toString()); + + if (!callbackResult.isSuccess) { + print('OIDC Callback: Authentication failed: ${callbackResult.error}'); + if (context.mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('OIDC authentication failed: ${callbackResult.error}'), + backgroundColor: Colors.red, + ), + ); + } + return; + } + + // Check if we have an API key directly from the callback + if (callbackResult.hasApiKey) { + print('OIDC Callback: Found API key in callback, completing login'); + await _completeOidcLogin(callbackResult.apiKey!); + } else { + print('OIDC Callback: No API key found, traditional OAuth flow not implemented yet'); + if (context.mounted) { + ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('OIDC callback received but no API key found'), + backgroundColor: Colors.orange, + ), + ); + } + } + + } catch (e) { + print('OIDC Callback: Error processing callback: $e'); + if (context.mounted) { + 
ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Error processing OIDC callback: $e'), + backgroundColor: Colors.red, + ), + ); + } + } + } + + /// Complete OIDC login with the provided API key + Future _completeOidcLogin(String apiKey) async { + try { + print('OIDC Callback: Completing login with API key'); + + // We need to get the server URL - we can get it from the current settings + // since the user would have entered it during the initial OIDC flow + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + // Check if we have a server URL from a previous attempt + String? serverUrl = settings.pinepodsServer; + + if (serverUrl == null || serverUrl.isEmpty) { + throw Exception('No server URL available for OIDC completion'); + } + + // Verify the API key works and get user details + // Verify API key + final isValidKey = await PinepodsLoginService.verifyApiKey(serverUrl, apiKey); + if (!isValidKey) { + throw Exception('API key verification failed'); + } + + // Get user ID + final userId = await PinepodsLoginService.getUserId(serverUrl, apiKey); + if (userId == null) { + throw Exception('Failed to get user ID'); + } + + // Get user details + final userDetails = await PinepodsLoginService.getUserDetails(serverUrl, apiKey, userId); + if (userDetails == null) { + throw Exception('Failed to get user details'); + } + + // Save the authentication details + settingsBloc.setPinepodsServer(serverUrl); + settingsBloc.setPinepodsApiKey(apiKey); + settingsBloc.setPinepodsUserId(userId); + + // Set additional user details if available + if (userDetails.username != null) { + settingsBloc.setPinepodsUsername(userDetails.username!); + } + if (userDetails.email != null) { + settingsBloc.setPinepodsEmail(userDetails.email!); + } + + // Fetch theme from server + await settingsBloc.fetchThemeFromServer(); + + print('OIDC Callback: Login completed successfully'); + + if (context.mounted) { + 
ScaffoldMessenger.of(context).showSnackBar( + const SnackBar( + content: Text('OIDC authentication successful!'), + backgroundColor: Colors.green, + ), + ); + + // Log current settings state for debugging + final currentSettings = settingsBloc.currentSettings; + print('OIDC Callback: Current settings after update:'); + print(' Server: ${currentSettings.pinepodsServer}'); + print(' API Key: ${currentSettings.pinepodsApiKey != null ? '[SET]' : '[NOT SET]'}'); + print(' User ID: ${currentSettings.pinepodsUserId}'); + print(' Username: ${currentSettings.pinepodsUsername}'); + + // Notify login success globally + AuthNotifier.notifyLoginSuccess(); + } + + } catch (e) { + print('OIDC Callback: Error completing login: $e'); + if (context.mounted) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text('Failed to complete OIDC login: $e'), + backgroundColor: Colors.red, + ), ); } } @@ -329,13 +519,28 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb @override void didChangeAppLifecycleState(AppLifecycleState state) async { + print('Deep Link: App lifecycle state changed to: $state'); final audioBloc = Provider.of(context, listen: false); switch (state) { case AppLifecycleState.resumed: + print('Deep Link: App resumed - checking for pending deep links...'); audioBloc.transitionLifecycleState(LifecycleState.resume); + + // Check for any pending deep links when app resumes + try { + final appLinks = AppLinks(); + final initialUri = await appLinks.getInitialLink(); + if (initialUri != null) { + print('Deep Link: Found pending link on resume: $initialUri'); + _handleLinkEvent(initialUri); + } + } catch (e) { + print('Deep Link: Error checking for pending links on resume: $e'); + } break; case AppLifecycleState.paused: + print('Deep Link: App paused'); audioBloc.transitionLifecycleState(LifecycleState.pause); break; default: @@ -361,9 +566,7 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb SliverVisibility( visible: 
widget.topBarVisible, sliver: SliverAppBar( - title: ExcludeSemantics( - child: TitleWidget(), - ), + title: ExcludeSemantics(child: TitleWidget()), backgroundColor: backgroundColour, floating: false, pinned: true, @@ -379,16 +582,12 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb fullscreenDialog: false, settings: const RouteSettings(name: 'queue'), builder: (context) => Scaffold( - appBar: AppBar( - title: const Text('Queue'), - ), + appBar: AppBar(title: const Text('Queue')), body: const Column( children: [ Expanded( child: CustomScrollView( - slivers: [ - PinepodsQueue(), - ], + slivers: [PinepodsQueue()], ), ), MiniPlayer(), @@ -408,39 +607,53 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb defaultTargetPlatform == TargetPlatform.iOS ? MaterialPageRoute( fullscreenDialog: false, - settings: const RouteSettings(name: 'pinepods_search'), - builder: (context) => const PinepodsSearch()) + settings: const RouteSettings( + name: 'pinepods_search', + ), + builder: (context) => + const PinepodsSearch(), + ) : SlideRightRoute( widget: const PinepodsSearch(), - settings: const RouteSettings(name: 'pinepods_search'), + settings: const RouteSettings( + name: 'pinepods_search', + ), ), ); }, ), PopupMenuButton( onSelected: _menuSelect, - icon: const Icon( - Icons.more_vert, - ), + icon: const Icon(Icons.more_vert), itemBuilder: (BuildContext context) { return >[ if (feedbackUrl.isNotEmpty) PopupMenuItem( - textStyle: Theme.of(context).textTheme.titleMedium, + textStyle: Theme.of( + context, + ).textTheme.titleMedium, value: 'feedback', child: Row( - crossAxisAlignment: CrossAxisAlignment.center, + crossAxisAlignment: + CrossAxisAlignment.center, children: [ const Padding( padding: EdgeInsets.only(right: 8.0), - child: Icon(Icons.feedback_outlined, size: 18.0), + child: Icon( + Icons.feedback_outlined, + size: 18.0, + ), + ), + Text( + L.of(context)!.feedback_menu_item_label, ), - Text(L.of(context)!.feedback_menu_item_label), ], ), ), 
PopupMenuItem( - textStyle: Theme.of(context).textTheme.titleMedium, + textStyle: Theme.of( + context, + ).textTheme.titleMedium, value: 'rss', child: Row( crossAxisAlignment: CrossAxisAlignment.center, @@ -454,7 +667,9 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb ), ), PopupMenuItem( - textStyle: Theme.of(context).textTheme.titleMedium, + textStyle: Theme.of( + context, + ).textTheme.titleMedium, value: 'settings', child: Row( children: [ @@ -473,10 +688,12 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb ), ), StreamBuilder( - stream: pager.currentPage, - builder: (BuildContext context, AsyncSnapshot snapshot) { - return _fragment(snapshot.data, searchBloc); - }), + stream: pager.currentPage, + builder: + (BuildContext context, AsyncSnapshot snapshot) { + return _fragment(snapshot.data, searchBloc); + }, + ), ], ), ), @@ -484,178 +701,246 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb ], ), bottomNavigationBar: StreamBuilder( - stream: pager.currentPage, - initialData: 0, - builder: (BuildContext context, AsyncSnapshot snapshot) { - int index = snapshot.data ?? 0; - - return StreamBuilder( - stream: Provider.of(context).settings, - builder: (BuildContext context, AsyncSnapshot settingsSnapshot) { - final bottomBarOrder = settingsSnapshot.data?.bottomBarOrder ?? 
- ['Home', 'Feed', 'Saved', 'Podcasts', 'Downloads', 'History', 'Playlists', 'Search']; - - // Create a map of all available nav items - final Map allNavItems = { - 'Home': BottomNavItem( - icon: Icons.home, - label: 'Home', - isSelected: false, - ), - 'Feed': BottomNavItem( - icon: Icons.rss_feed, - label: 'Feed', - isSelected: false, - ), - 'Saved': BottomNavItem( - icon: Icons.bookmark, - label: 'Saved', - isSelected: false, - ), - 'Podcasts': BottomNavItem( - icon: Icons.podcasts, - label: 'Podcasts', - isSelected: false, - ), - 'Downloads': BottomNavItem( - icon: Icons.download, - label: 'Downloads', - isSelected: false, - ), - 'History': BottomNavItem( - icon: Icons.history, - label: 'History', - isSelected: false, - ), - 'Playlists': BottomNavItem( - icon: Icons.playlist_play, - label: 'Playlists', - isSelected: false, - ), - 'Search': BottomNavItem( - icon: Icons.search, - label: 'Search', - isSelected: false, - ), - }; - - // Create the ordered nav items based on settings - final List navItems = bottomBarOrder.map((label) { - final baseItem = allNavItems[label]!; - final itemIndex = bottomBarOrder.indexOf(label); - return BottomNavItem( - icon: index == itemIndex ? _getSelectedIcon(label) : _getUnselectedIcon(label), - label: label, - isSelected: index == itemIndex, - ); - }).toList(); - - return Container( - height: 70, - decoration: BoxDecoration( - color: Theme.of(context).bottomAppBarTheme.color, - border: Border( - top: BorderSide( - color: Theme.of(context).dividerColor, - width: 0.5, + stream: pager.currentPage, + initialData: 0, + builder: (BuildContext context, AsyncSnapshot snapshot) { + int index = snapshot.data ?? 0; + + return StreamBuilder( + stream: Provider.of(context).settings, + builder: + ( + BuildContext context, + AsyncSnapshot settingsSnapshot, + ) { + final bottomBarOrder = + settingsSnapshot.data?.bottomBarOrder ?? 
+ [ + 'Home', + 'Feed', + 'Saved', + 'Podcasts', + 'Downloads', + 'History', + 'Playlists', + 'Search', + ]; + + // Create a map of all available nav items + final Map allNavItems = { + 'Home': BottomNavItem( + icon: Icons.home, + label: 'Home', + isSelected: false, + ), + 'Feed': BottomNavItem( + icon: Icons.rss_feed, + label: 'Feed', + isSelected: false, + ), + 'Saved': BottomNavItem( + icon: Icons.bookmark, + label: 'Saved', + isSelected: false, + ), + 'Podcasts': BottomNavItem( + icon: Icons.podcasts, + label: 'Podcasts', + isSelected: false, + ), + 'Downloads': BottomNavItem( + icon: Icons.download, + label: 'Downloads', + isSelected: false, + ), + 'History': BottomNavItem( + icon: Icons.history, + label: 'History', + isSelected: false, + ), + 'Playlists': BottomNavItem( + icon: Icons.playlist_play, + label: 'Playlists', + isSelected: false, + ), + 'Search': BottomNavItem( + icon: Icons.search, + label: 'Search', + isSelected: false, + ), + }; + + // Create the ordered nav items based on settings + final List navItems = bottomBarOrder.map(( + label, + ) { + final baseItem = allNavItems[label]!; + final itemIndex = bottomBarOrder.indexOf(label); + return BottomNavItem( + icon: index == itemIndex + ? 
_getSelectedIcon(label) + : _getUnselectedIcon(label), + label: label, + isSelected: index == itemIndex, + ); + }).toList(); + + // Calculate if all icons fit in the current screen width + final screenWidth = MediaQuery.of(context).size.width; + final iconWidth = 80.0; + final totalIconsWidth = navItems.length * iconWidth; + final isLandscape = MediaQuery.of(context).orientation == Orientation.landscape; + final shouldCenterInPortrait = !isLandscape && totalIconsWidth <= screenWidth; + + return Container( + height: 70 + MediaQuery.of(context).padding.bottom, + decoration: BoxDecoration( + color: Theme.of(context).bottomAppBarTheme.color, + border: Border( + top: BorderSide( + color: Theme.of(context).dividerColor, + width: 0.5, + ), ), ), - ), - child: MediaQuery.of(context).orientation == Orientation.landscape - ? Center( - child: Row( - mainAxisSize: MainAxisSize.min, - children: navItems.asMap().entries.map((entry) { - int itemIndex = entry.key; - BottomNavItem item = entry.value; - - return GestureDetector( - onTap: () => pager.changePage(itemIndex), - child: Container( - width: 80, - padding: const EdgeInsets.symmetric(vertical: 8), - child: Column( - mainAxisSize: MainAxisSize.min, - children: [ - Icon( - item.icon, - color: item.isSelected - ? Theme.of(context).iconTheme.color - : HSLColor.fromColor(Theme.of(context).bottomAppBarTheme.color!) - .withLightness(0.8) - .toColor(), - size: 24, + child: (isLandscape || shouldCenterInPortrait) + ? 
Padding( + padding: EdgeInsets.only( + bottom: MediaQuery.of(context).padding.bottom, + ), + child: Center( + child: Row( + mainAxisSize: MainAxisSize.min, + children: navItems.asMap().entries.map((entry) { + int itemIndex = entry.key; + BottomNavItem item = entry.value; + + return GestureDetector( + onTap: () => pager.changePage(itemIndex), + child: Container( + width: 80, + padding: const EdgeInsets.symmetric( + vertical: 8, ), - const SizedBox(height: 4), - Text( - item.label, - style: TextStyle( - fontSize: 11, - color: item.isSelected - ? Theme.of(context).iconTheme.color - : HSLColor.fromColor(Theme.of(context).bottomAppBarTheme.color!) - .withLightness(0.8) - .toColor(), - fontWeight: item.isSelected ? FontWeight.w600 : FontWeight.normal, - ), - textAlign: TextAlign.center, + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + Icon( + item.icon, + color: item.isSelected + ? Theme.of( + context, + ).iconTheme.color + : HSLColor.fromColor( + Theme.of(context) + .bottomAppBarTheme + .color!, + ) + .withLightness(0.8) + .toColor(), + size: 24, + ), + const SizedBox(height: 4), + Text( + item.label, + style: TextStyle( + fontSize: 11, + color: item.isSelected + ? Theme.of( + context, + ).iconTheme.color + : HSLColor.fromColor( + Theme.of(context) + .bottomAppBarTheme + .color!, + ) + .withLightness(0.8) + .toColor(), + fontWeight: item.isSelected + ? FontWeight.w600 + : FontWeight.normal, + ), + textAlign: TextAlign.center, + ), + ], ), - ], - ), - ), - ); - }).toList(), - ), - ) - : SingleChildScrollView( - scrollDirection: Axis.horizontal, - child: Row( - children: navItems.asMap().entries.map((entry) { - int itemIndex = entry.key; - BottomNavItem item = entry.value; - - return GestureDetector( - onTap: () => pager.changePage(itemIndex), - child: Container( - width: 80, - padding: const EdgeInsets.symmetric(vertical: 8), - child: Column( - mainAxisSize: MainAxisSize.min, - children: [ - Icon( - item.icon, - color: item.isSelected - ? 
Theme.of(context).iconTheme.color - : HSLColor.fromColor(Theme.of(context).bottomAppBarTheme.color!) - .withLightness(0.8) - .toColor(), - size: 24, + ), + ); + }).toList(), + ), + ), + ) + : Padding( + padding: EdgeInsets.only( + bottom: MediaQuery.of(context).padding.bottom, + ), + child: SingleChildScrollView( + scrollDirection: Axis.horizontal, + child: Row( + children: navItems.asMap().entries.map((entry) { + int itemIndex = entry.key; + BottomNavItem item = entry.value; + + return GestureDetector( + onTap: () => pager.changePage(itemIndex), + child: Container( + width: 80, + padding: const EdgeInsets.symmetric( + vertical: 8, ), - const SizedBox(height: 4), - Text( - item.label, - style: TextStyle( - fontSize: 11, - color: item.isSelected - ? Theme.of(context).iconTheme.color - : HSLColor.fromColor(Theme.of(context).bottomAppBarTheme.color!) - .withLightness(0.8) - .toColor(), - fontWeight: item.isSelected ? FontWeight.w600 : FontWeight.normal, - ), - textAlign: TextAlign.center, + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + Icon( + item.icon, + color: item.isSelected + ? Theme.of( + context, + ).iconTheme.color + : HSLColor.fromColor( + Theme.of(context) + .bottomAppBarTheme + .color!, + ) + .withLightness(0.8) + .toColor(), + size: 24, + ), + const SizedBox(height: 4), + Text( + item.label, + style: TextStyle( + fontSize: 11, + color: item.isSelected + ? Theme.of( + context, + ).iconTheme.color + : HSLColor.fromColor( + Theme.of(context) + .bottomAppBarTheme + .color!, + ) + .withLightness(0.8) + .toColor(), + fontWeight: item.isSelected + ? FontWeight.w600 + : FontWeight.normal, + ), + textAlign: TextAlign.center, + ), + ], ), - ], - ), - ), - ); - }).toList(), + ), + ); + }).toList(), + ), + ), ), - ), - ); - } - ); - }), + ); + }, + ); + }, + ), ), ); } @@ -663,13 +948,13 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb Widget _fragment(int? 
index, EpisodeBloc searchBloc) { final settingsBloc = Provider.of(context, listen: false); final bottomBarOrder = settingsBloc.currentSettings.bottomBarOrder; - + if (index == null || index < 0 || index >= bottomBarOrder.length) { return const PinepodsHome(); // Default to Home } - + final pageLabel = bottomBarOrder[index]; - + switch (pageLabel) { case 'Home': return const PinepodsHome(); @@ -694,29 +979,47 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb IconData _getSelectedIcon(String label) { switch (label) { - case 'Home': return Icons.home; - case 'Feed': return Icons.rss_feed; - case 'Saved': return Icons.bookmark; - case 'Podcasts': return Icons.podcasts; - case 'Downloads': return Icons.download; - case 'History': return Icons.history; - case 'Playlists': return Icons.playlist_play; - case 'Search': return Icons.search; - default: return Icons.home; + case 'Home': + return Icons.home; + case 'Feed': + return Icons.rss_feed; + case 'Saved': + return Icons.bookmark; + case 'Podcasts': + return Icons.podcasts; + case 'Downloads': + return Icons.download; + case 'History': + return Icons.history; + case 'Playlists': + return Icons.playlist_play; + case 'Search': + return Icons.search; + default: + return Icons.home; } } IconData _getUnselectedIcon(String label) { switch (label) { - case 'Home': return Icons.home_outlined; - case 'Feed': return Icons.rss_feed_outlined; - case 'Saved': return Icons.bookmark_outline; - case 'Podcasts': return Icons.podcasts_outlined; - case 'Downloads': return Icons.download_outlined; - case 'History': return Icons.history_outlined; - case 'Playlists': return Icons.playlist_play_outlined; - case 'Search': return Icons.search_outlined; - default: return Icons.home_outlined; + case 'Home': + return Icons.home_outlined; + case 'Feed': + return Icons.rss_feed_outlined; + case 'Saved': + return Icons.bookmark_outline; + case 'Podcasts': + return Icons.podcasts_outlined; + case 'Downloads': + return 
Icons.download_outlined; + case 'History': + return Icons.history_outlined; + case 'Playlists': + return Icons.playlist_play_outlined; + case 'Search': + return Icons.search_outlined; + default: + return Icons.home_outlined; } } @@ -727,7 +1030,6 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb var url = ''; switch (choice) { - case 'settings': await Navigator.push( context, @@ -761,30 +1063,25 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb ), actions: [ BasicDialogAction( - title: ActionText( - L.of(context)!.cancel_button_label, - ), + title: ActionText(L.of(context)!.cancel_button_label), onPressed: () { Navigator.pop(context); }, ), BasicDialogAction( - title: ActionText( - L.of(context)!.ok_button_label, - ), + title: ActionText(L.of(context)!.ok_button_label), iosIsDefaultAction: true, onPressed: () async { Navigator.of(context).pop(); // Close the dialog first - + // Show loading indicator showDialog( context: context, barrierDismissible: false, - builder: (context) => const Center( - child: CircularProgressIndicator(), - ), + builder: (context) => + const Center(child: CircularProgressIndicator()), ); - + try { await _handleRssUrl(url); } catch (e) { @@ -812,10 +1109,10 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb // Get services final podcastApi = MobilePodcastApi(); final pinepodsService = PinepodsService(); - + // Load podcast feed from RSS final podcast = await podcastApi.loadFeed(url); - + // Create UnifiedPinepodsPodcast from the loaded feed final unifiedPodcast = UnifiedPinepodsPodcast( id: 0, @@ -834,13 +1131,13 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb explicit: false, episodeCount: podcast.episodes?.length ?? 
0, ); - + // Check if podcast is already followed bool isFollowing = false; final settingsBloc = Provider.of(context, listen: false); final settings = settingsBloc.currentSettings; final userId = settings.pinepodsUserId; - + if (userId != null) { try { isFollowing = await pinepodsService.checkPodcastExists( @@ -852,10 +1149,10 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb print('Failed to check if podcast exists: $e'); } } - + if (mounted) { Navigator.of(context).pop(); // Close loading dialog - + // Navigate to podcast details page Navigator.push( context, @@ -879,16 +1176,13 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb void _launchFeedback() async { final uri = Uri.parse(feedbackUrl); - if (!await launchUrl( - uri, - mode: LaunchMode.externalApplication, - )) { + if (!await launchUrl(uri, mode: LaunchMode.externalApplication)) { throw Exception('Could not launch $uri'); } } void _launchEmail() async { - final uri = Uri.parse('mailto:hello@anytimeplayer.app'); + final uri = Uri.parse('mailto:mobile-support@pinepods.online'); if (await canLaunchUrl(uri)) { await launchUrl(uri); @@ -899,12 +1193,12 @@ class _AnytimeHomePageState extends State with WidgetsBindingOb } class TitleWidget extends StatelessWidget { - TitleWidget({ - super.key, - }); + TitleWidget({super.key}); String _generateGravatarUrl(String email, {int size = 40}) { - final hash = md5.convert(utf8.encode(email.toLowerCase().trim())).toString(); + final hash = md5 + .convert(utf8.encode(email.toLowerCase().trim())) + .toString(); return 'https://www.gravatar.com/avatar/$hash?s=$size&d=identicon'; } @@ -943,7 +1237,9 @@ class TitleWidget extends StatelessWidget { Text( 'Pods', style: TextStyle( - color: Theme.of(context).brightness == Brightness.light ? Colors.black : Colors.white, + color: Theme.of(context).brightness == Brightness.light + ? 
Colors.black + : Colors.white, fontWeight: FontWeight.bold, fontFamily: 'MontserratRegular', fontSize: 18, @@ -1002,7 +1298,9 @@ class TitleWidget extends StatelessWidget { child: Text( username, style: TextStyle( - color: Theme.of(context).brightness == Brightness.light ? Colors.black : Colors.white, + color: Theme.of(context).brightness == Brightness.light + ? Colors.black + : Colors.white, fontWeight: FontWeight.w600, fontSize: 16, ), diff --git a/mobile/lib/ui/podcast/mini_player.dart b/mobile/lib/ui/podcast/mini_player.dart index 798d49ad..010b14d6 100644 --- a/mobile/lib/ui/podcast/mini_player.dart +++ b/mobile/lib/ui/podcast/mini_player.dart @@ -3,6 +3,7 @@ // found in the LICENSE file. import 'dart:async'; +import 'dart:ui'; import 'package:pinepods_mobile/bloc/podcast/audio_bloc.dart'; import 'package:pinepods_mobile/entities/episode.dart'; @@ -101,9 +102,27 @@ class _MiniPlayerBuilderState extends State<_MiniPlayerBuilder> routeSettings: const RouteSettings(name: 'nowplaying'), isScrollControlled: true, builder: (BuildContext modalContext) { + final contextPadding = MediaQuery.of(context).padding.top; + final modalPadding = MediaQuery.of(modalContext).padding.top; + + // Get the actual system safe area from the window (works on both iOS and Android) + final window = PlatformDispatcher.instance.views.first; + final systemPadding = window.padding.top / window.devicePixelRatio; + + // Use the best available padding value + double topPadding; + if (contextPadding > 0) { + topPadding = contextPadding; + } else if (modalPadding > 0) { + topPadding = modalPadding; + } else { + // Fall back to system padding if both contexts have 0 + topPadding = systemPadding; + } + + return Padding( - padding: - EdgeInsets.only(top: MediaQuery.of(context).padding.top), + padding: EdgeInsets.only(top: topPadding), child: const NowPlaying(), ); }, diff --git a/mobile/lib/ui/podcast/now_playing_options.dart b/mobile/lib/ui/podcast/now_playing_options.dart index 35198dc6..37b99a09 
100644 --- a/mobile/lib/ui/podcast/now_playing_options.dart +++ b/mobile/lib/ui/podcast/now_playing_options.dart @@ -58,11 +58,19 @@ class _NowPlayingOptionsSelectorState extends State { // snap: true, // snapSizes: [minSize, maxSize], builder: (BuildContext context, ScrollController scrollController) { - return DefaultTabController( - animationDuration: !draggableController!.isAttached || draggableController!.size <= minSize - ? const Duration(seconds: 0) - : kTabScrollDuration, - length: 2, + return StreamBuilder( + initialData: QueueEmptyState(), + stream: queueBloc.queue, + builder: (context, queueSnapshot) { + final hasTranscript = queueSnapshot.hasData && + queueSnapshot.data?.playing != null && + queueSnapshot.data!.playing!.hasTranscripts; + + return DefaultTabController( + animationDuration: !draggableController!.isAttached || draggableController!.size <= minSize + ? const Duration(seconds: 0) + : kTabScrollDuration, + length: hasTranscript ? 2 : 1, child: LayoutBuilder(builder: (BuildContext ctx, BoxConstraints constraints) { return SingleChildScrollView( controller: scrollController, @@ -119,60 +127,44 @@ class _NowPlayingOptionsSelectorState extends State { : BorderSide(color: Colors.grey[800]!, width: 1.0), ), ), - child: StreamBuilder( - initialData: QueueEmptyState(), - stream: queueBloc.queue, - builder: (context, snapshot) { - return TabBar( - onTap: (index) { - DefaultTabController.of(ctx).animateTo(index); + child: TabBar( + onTap: (index) { + DefaultTabController.of(ctx).animateTo(index); - if (draggableController != null && draggableController!.size < 1.0) { - draggableController!.animateTo( - 1.0, - duration: const Duration(milliseconds: 150), - curve: Curves.easeInOut, - ); - } - }, - automaticIndicatorColorAdjustment: false, - indicatorPadding: EdgeInsets.zero, - - /// Little hack to hide the indicator when closed - indicatorColor: draggableController != null && - (!draggableController!.isAttached || draggableController!.size <= minSize) - ? 
Theme.of(context).secondaryHeaderColor - : null, - tabs: [ - Padding( - padding: const EdgeInsets.only(top: 8.0, bottom: 8.0), - child: Text( - L.of(context)!.up_next_queue_label.toUpperCase(), - style: Theme.of(context).textTheme.labelLarge, - ), - ), - Padding( - padding: const EdgeInsets.only(top: 8.0, bottom: 8.0), - // If the episode does not support transcripts, grey out - // the option. - child: snapshot.hasData && - snapshot.data?.playing != null && - snapshot.data!.playing!.hasTranscripts - ? Text( - L.of(context)!.transcript_label.toUpperCase(), - style: Theme.of(context).textTheme.labelLarge, - ) - : Text( - L.of(context)!.transcript_label.toUpperCase(), - style: Theme.of(context) - .textTheme - .labelLarge! - .copyWith(color: theme.disabledColor), - ), - ), - ], + if (draggableController != null && draggableController!.size < 1.0) { + draggableController!.animateTo( + 1.0, + duration: const Duration(milliseconds: 150), + curve: Curves.easeInOut, ); - }), + } + }, + automaticIndicatorColorAdjustment: false, + indicatorPadding: EdgeInsets.zero, + + /// Little hack to hide the indicator when closed + indicatorColor: draggableController != null && + (!draggableController!.isAttached || draggableController!.size <= minSize) + ? Theme.of(context).secondaryHeaderColor + : null, + tabs: [ + Padding( + padding: const EdgeInsets.only(top: 8.0, bottom: 8.0), + child: Text( + L.of(context)!.up_next_queue_label.toUpperCase(), + style: Theme.of(context).textTheme.labelLarge, + ), + ), + if (hasTranscript) + Padding( + padding: const EdgeInsets.only(top: 8.0, bottom: 8.0), + child: Text( + L.of(context)!.transcript_label.toUpperCase(), + style: Theme.of(context).textTheme.labelLarge, + ), + ), + ], + ), ), const Padding(padding: EdgeInsets.only(bottom: 12.0)), Expanded( @@ -188,7 +180,8 @@ class _NowPlayingOptionsSelectorState extends State { isPinepodsConnected ? 
const PinepodsUpNextView() : const UpNextView(), - const TranscriptView(), + if (hasTranscript) + const TranscriptView(), ], ); }, @@ -201,6 +194,7 @@ class _NowPlayingOptionsSelectorState extends State { ); }), ); + }); }, ); } @@ -250,8 +244,16 @@ class _NowPlayingOptionsSelectorWideState extends State( + initialData: QueueEmptyState(), + stream: queueBloc.queue, + builder: (context, queueSnapshot) { + final hasTranscript = queueSnapshot.hasData && + queueSnapshot.data?.playing != null && + queueSnapshot.data!.playing!.hasTranscripts; + + return DefaultTabController( + length: hasTranscript ? 2 : 1, child: LayoutBuilder(builder: (BuildContext ctx, BoxConstraints constraints) { return SingleChildScrollView( controller: scrollController, @@ -273,46 +275,33 @@ class _NowPlayingOptionsSelectorWideState extends State( - initialData: QueueEmptyState(), - stream: queueBloc.queue, - builder: (context, snapshot) { - return TabBar( - automaticIndicatorColorAdjustment: false, - tabs: [ - Padding( - padding: const EdgeInsets.only(top: 16.0, bottom: 16.0), - child: Text( - L.of(context)!.up_next_queue_label.toUpperCase(), - style: Theme.of(context).textTheme.labelLarge, - ), - ), - Padding( - padding: const EdgeInsets.only(top: 16.0, bottom: 16.0), - child: snapshot.hasData && - snapshot.data?.playing != null && - snapshot.data!.playing!.hasTranscripts - ? Text( - L.of(context)!.transcript_label.toUpperCase(), - style: Theme.of(context).textTheme.labelLarge, - ) - : Text( - L.of(context)!.transcript_label.toUpperCase(), - style: Theme.of(context) - .textTheme - .labelLarge! 
- .copyWith(color: theme.disabledColor), - ), - ), - ], - ); - }), + child: TabBar( + automaticIndicatorColorAdjustment: false, + tabs: [ + Padding( + padding: const EdgeInsets.only(top: 16.0, bottom: 16.0), + child: Text( + L.of(context)!.up_next_queue_label.toUpperCase(), + style: Theme.of(context).textTheme.labelLarge, + ), + ), + if (hasTranscript) + Padding( + padding: const EdgeInsets.only(top: 16.0, bottom: 16.0), + child: Text( + L.of(context)!.transcript_label.toUpperCase(), + style: Theme.of(context).textTheme.labelLarge, + ), + ), + ], + ), ), - const Expanded( + Expanded( child: TabBarView( children: [ - UpNextView(), - TranscriptView(), + const UpNextView(), + if (hasTranscript) + const TranscriptView(), ], ), ), @@ -322,6 +311,7 @@ class _NowPlayingOptionsSelectorWideState extends State createState() => _FollowButtonState(); +} + +class _FollowButtonState extends State { + bool _isLoading = false; + @override Widget build(BuildContext context) { final bloc = Provider.of(context); + // If we're in loading state, show loading button immediately + if (_isLoading) { + print('Follow button: Showing loading spinner - _isLoading=$_isLoading'); + return Semantics( + liveRegion: true, + child: OutlinedButton.icon( + style: OutlinedButton.styleFrom( + padding: const EdgeInsets.fromLTRB(10.0, 4.0, 10.0, 4.0), + shape: RoundedRectangleBorder(borderRadius: BorderRadius.circular(8.0)), + ), + icon: const SizedBox( + width: 20, + height: 20, + child: CircularProgressIndicator( + strokeWidth: 3.0, + valueColor: AlwaysStoppedAnimation(Colors.blue), + ), + ), + label: Text(L.of(context)!.subscribe_label), + onPressed: null, + ), + ); + } + return StreamBuilder>( stream: bloc.details, builder: (context, snapshot) { @@ -707,11 +741,31 @@ class FollowButton extends StatelessWidget { if (snapshot.hasData) { final state = snapshot.data; - if (state is BlocPopulatedState) { + if (state is BlocLoadingState) { + ready = false; + subscribed = state.data?.subscribed ?? 
false; + print('Follow button: BlocLoadingState - ready=$ready, subscribed=$subscribed, _isLoading=$_isLoading'); + } else if (state is BlocPopulatedState) { ready = true; subscribed = state.results!.subscribed; + print('Follow button: BlocPopulatedState - ready=$ready, subscribed=$subscribed, _isLoading=$_isLoading'); + + // Reset loading state when we get populated data + if (_isLoading) { + print('Follow button: Resetting loading state'); + WidgetsBinding.instance.addPostFrameCallback((_) { + if (mounted) { + setState(() { + _isLoading = false; + }); + print('Follow button: Loading state reset to false'); + } + }); + } } } + print('Follow button: Rendering normal UI - ready=$ready, subscribed=$subscribed, _isLoading=$_isLoading'); + return Semantics( liveRegion: true, child: subscribed @@ -769,15 +823,122 @@ class FollowButton extends StatelessWidget { Icons.add, ), label: Text(L.of(context)!.subscribe_label), - onPressed: ready - ? () { + onPressed: ready && !_isLoading + ? () async { + print('Follow button: CLICKED - Setting loading to true'); + setState(() { + _isLoading = true; + }); + print('Follow button: Loading state set to: $_isLoading'); + bloc.podcastEvent(PodcastEvent.subscribe); + + // Show loading indicator for a minimum time to be visible + await Future.delayed(const Duration(milliseconds: 300)); + + // After successful subscription, check if we should switch to PinePods context + await _handlePostSubscriptionContextSwitch(context, bloc); } : null, ), ); }); } + + Future _handlePostSubscriptionContextSwitch(BuildContext context, PodcastBloc bloc) async { + print('Follow button: Starting context switch check'); + // Wait a short moment for subscription to complete, then check if we should context switch + await Future.delayed(const Duration(milliseconds: 500)); + + if (!mounted) { + print('Follow button: Widget not mounted, skipping context switch'); + return; + } + + // Check if we're in PinePods environment and should switch contexts + final 
settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + if (settings.pinepodsServer != null && + settings.pinepodsApiKey != null && + settings.pinepodsUserId != null) { + + // Check if the podcast is now subscribed to PinePods + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final isSubscribed = await pinepodsService.checkPodcastExists( + widget.podcast.title, + widget.podcast.url ?? '', + settings.pinepodsUserId! + ); + + if (isSubscribed && mounted) { + print('Follow button: Podcast is subscribed, switching to PinePods context'); + + // Reset loading state before context switch + setState(() { + _isLoading = false; + }); + + // Create unified podcast object for PinePods context + final unifiedPodcast = UnifiedPinepodsPodcast( + id: 0, // Will be fetched by PinePods component + indexId: 0, // Default for subscribed podcasts + title: widget.podcast.title, + url: widget.podcast.url ?? '', + originalUrl: widget.podcast.url ?? '', + link: widget.podcast.link ?? '', + description: widget.podcast.description ?? '', + author: widget.podcast.copyright ?? '', + ownerName: widget.podcast.copyright ?? '', + image: widget.podcast.imageUrl ?? '', + artwork: widget.podcast.imageUrl ?? 
'', + lastUpdateTime: 0, + explicit: false, + episodeCount: 0, // Will be loaded + ); + + // Replace current route with PinePods podcast details + Navigator.pushReplacement( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'pinepodspodcastdetails'), + builder: (context) => PinepodsPodcastDetails( + podcast: unifiedPodcast, + isFollowing: true, + ), + ), + ); + } else { + print('Follow button: Podcast not subscribed or widget not mounted, staying in current context'); + // Reset loading state if not switching contexts + if (mounted) { + setState(() { + _isLoading = false; + }); + } + } + } catch (e) { + print('Error checking post-subscription status: $e'); + // Reset loading state on error + if (mounted) { + setState(() { + _isLoading = false; + }); + } + } + } else { + print('Follow button: Not in PinePods environment, staying in RSS context'); + // Reset loading state if not in PinePods environment + if (mounted) { + setState(() { + _isLoading = false; + }); + } + } + } } class FilterButton extends StatelessWidget { diff --git a/mobile/lib/ui/podcast/transcript_view.dart b/mobile/lib/ui/podcast/transcript_view.dart index 7782f8d4..6be41b08 100644 --- a/mobile/lib/ui/podcast/transcript_view.dart +++ b/mobile/lib/ui/podcast/transcript_view.dart @@ -15,6 +15,7 @@ import 'package:pinepods_mobile/state/transcript_state_event.dart'; import 'package:pinepods_mobile/ui/widgets/platform_progress_indicator.dart'; import 'package:extended_image/extended_image.dart'; import 'package:flutter/material.dart'; +import 'package:flutter_html/flutter_html.dart'; import 'package:logging/logging.dart'; import 'package:provider/provider.dart'; import 'package:scrollable_positioned_list/scrollable_positioned_list.dart'; @@ -43,6 +44,7 @@ class _TranscriptViewState extends State { bool forceTranscriptUpdate = false; bool first = true; bool scrolling = false; + bool isHtmlTranscript = false; String speaker = ''; RegExp exp = 
RegExp(r'(^)(\[?)(?[A-Za-z0-9\s]+)(\]?)(\s?)(:)'); @@ -65,7 +67,7 @@ class _TranscriptViewState extends State { // Listen to playback position updates and scroll to the correct items in the transcript // if we have auto scroll enabled. _positionSubscription = audioBloc.playPosition!.listen((event) { - if (_itemScrollController.isAttached) { + if (_itemScrollController.isAttached && !isHtmlTranscript) { var transcript = event.episode?.transcript; if (transcript != null && transcript.subtitles.isNotEmpty) { @@ -200,25 +202,15 @@ class _TranscriptViewState extends State { crossAxisAlignment: CrossAxisAlignment.center, children: [ Text( - L.of(context)!.no_transcript_available_label, + 'Transcript Error', style: Theme.of(context).textTheme.titleLarge, textAlign: TextAlign.center, ), - Padding( - padding: const EdgeInsets.only(top: 32.0, bottom: 32.0), - child: OutlinedButton( - onPressed: () { - final uri = Uri.parse(L.of(context)!.transcript_why_not_url); - - unawaited( - canLaunchUrl(uri).then((value) => launchUrl(uri)), - ); - }, - child: Text( - L.of(context)!.transcript_why_not_label, - style: Theme.of(context).textTheme.titleSmall, - textAlign: TextAlign.center, - )), + const SizedBox(height: 16), + Text( + 'Failed to load transcript. The episode has transcript support but there was an error retrieving or parsing the transcript data.', + style: Theme.of(context).textTheme.bodyMedium, + textAlign: TextAlign.center, ), ], ), @@ -226,6 +218,24 @@ class _TranscriptViewState extends State { ); } else { final items = transcriptSnapshot.data!.transcript?.subtitles ?? 
[]; + + // Detect if this is an HTML transcript (single item with HTMLFULL marker) + final isLikelyHtmlTranscript = items.length == 1 && + items.first.data != null && + items.first.data!.startsWith('{{HTMLFULL}}'); + + // Update the state flag for HTML transcript detection + if (isLikelyHtmlTranscript != isHtmlTranscript) { + WidgetsBinding.instance.addPostFrameCallback((_) { + setState(() { + isHtmlTranscript = isLikelyHtmlTranscript; + if (isHtmlTranscript) { + autoScroll = false; + autoScrollEnabled = false; + } + }); + }); + } return Column( children: [ @@ -266,31 +276,33 @@ class _TranscriptViewState extends State { }), ), ), - Padding( - padding: const EdgeInsets.only(left: 8.0, right: 8.0), - child: Row( - mainAxisAlignment: MainAxisAlignment.end, - crossAxisAlignment: CrossAxisAlignment.center, - children: [ - Text(L.of(context)!.auto_scroll_transcript_label), - Switch( - value: autoScroll, - onChanged: autoScrollEnabled - ? (bool enableAutoScroll) { - setState(() { - autoScroll = enableAutoScroll; - - if (enableAutoScroll) { - forceTranscriptUpdate = true; - } - }); - } - : null, - ), - ], + if (!isHtmlTranscript) + Padding( + padding: const EdgeInsets.only(left: 8.0, right: 8.0), + child: Row( + mainAxisAlignment: MainAxisAlignment.end, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + Text(L.of(context)!.auto_scroll_transcript_label), + Switch( + value: autoScroll, + onChanged: autoScrollEnabled + ? 
(bool enableAutoScroll) { + setState(() { + autoScroll = enableAutoScroll; + + if (enableAutoScroll) { + forceTranscriptUpdate = true; + } + }); + } + : null, + ), + ], + ), ), - ), - if (queueSnapshot.hasData && + if (!isHtmlTranscript && + queueSnapshot.hasData && queueSnapshot.data?.playing != null && queueSnapshot.data!.playing!.persons.isNotEmpty) Container( @@ -385,12 +397,23 @@ class SubtitleWidget extends StatelessWidget { @override Widget build(BuildContext context) { final audioBloc = Provider.of(context, listen: false); + final data = subtitle.data ?? ''; + final isFullHtmlTranscript = data.startsWith('{{HTMLFULL}}'); + // For full HTML transcripts, render as a simple container without timing or clickability + if (isFullHtmlTranscript) { + return Container( + width: double.infinity, + padding: const EdgeInsets.all(16.0), + child: _buildSubtitleContent(context), + ); + } + + // For timed transcripts (JSON, SRT, chunked HTML), render with timing and clickability return GestureDetector( behavior: HitTestBehavior.translucent, onTap: () { final p = subtitle.start + margin; - audioBloc.transitionPosition(p.inSeconds.toDouble()); }, child: Container( @@ -407,10 +430,7 @@ class SubtitleWidget extends StatelessWidget { : '${_formatDuration(subtitle.start)} - ${subtitle.speaker}', style: Theme.of(context).textTheme.titleSmall, ), - Text( - subtitle.data!, - style: Theme.of(context).textTheme.titleMedium, - ), + _buildSubtitleContent(context), const Padding(padding: EdgeInsets.fromLTRB(0.0, 0.0, 0.0, 16.0)) ], ), @@ -418,6 +438,90 @@ class SubtitleWidget extends StatelessWidget { ); } + Widget _buildSubtitleContent(BuildContext context) { + final data = subtitle.data ?? 
''; + + // Check if this is full HTML content (single document) + if (data.startsWith('{{HTMLFULL}}')) { + final htmlContent = data.substring(12); // Remove '{{HTMLFULL}}' marker + + return Html( + data: htmlContent, + style: { + 'body': Style( + margin: Margins.zero, + padding: HtmlPaddings.zero, + fontSize: FontSize(Theme.of(context).textTheme.bodyMedium?.fontSize ?? 14), + color: Theme.of(context).textTheme.bodyMedium?.color, + fontFamily: Theme.of(context).textTheme.bodyMedium?.fontFamily, + lineHeight: const LineHeight(1.5), + ), + 'a': Style( + color: Theme.of(context).primaryColor, + textDecoration: TextDecoration.underline, + ), + 'p': Style( + margin: Margins.only(bottom: 12), + padding: HtmlPaddings.zero, + ), + 'h1, h2, h3, h4, h5, h6': Style( + margin: Margins.only(top: 16, bottom: 8), + fontWeight: FontWeight.bold, + ), + 'strong, b': Style( + fontWeight: FontWeight.bold, + ), + 'em, i': Style( + fontStyle: FontStyle.italic, + ), + }, + onLinkTap: (url, attributes, element) { + if (url != null) { + final uri = Uri.parse(url); + launchUrl(uri); + } + }, + ); + } + // Check if this is chunked HTML content (legacy) + else if (data.startsWith('{{HTML}}')) { + final htmlContent = data.substring(8); // Remove '{{HTML}}' marker + + return Html( + data: htmlContent, + style: { + 'body': Style( + margin: Margins.zero, + padding: HtmlPaddings.zero, + fontSize: FontSize(Theme.of(context).textTheme.titleMedium?.fontSize ?? 
16), + color: Theme.of(context).textTheme.titleMedium?.color, + fontFamily: Theme.of(context).textTheme.titleMedium?.fontFamily, + ), + 'a': Style( + color: Theme.of(context).primaryColor, + textDecoration: TextDecoration.underline, + ), + 'p': Style( + margin: Margins.zero, + padding: HtmlPaddings.zero, + ), + }, + onLinkTap: (url, attributes, element) { + if (url != null) { + final uri = Uri.parse(url); + launchUrl(uri); + } + }, + ); + } else { + // Render as plain text for non-HTML content + return Text( + data, + style: Theme.of(context).textTheme.titleMedium, + ); + } + } + String _formatDuration(Duration duration) { final hh = (duration.inHours).toString().padLeft(2, '0'); final mm = (duration.inMinutes % 60).toString().padLeft(2, '0'); diff --git a/mobile/lib/ui/settings/pinepods_login.dart b/mobile/lib/ui/settings/pinepods_login.dart index ef56ce79..816809ae 100644 --- a/mobile/lib/ui/settings/pinepods_login.dart +++ b/mobile/lib/ui/settings/pinepods_login.dart @@ -5,6 +5,7 @@ import 'package:pinepods_mobile/l10n/L.dart'; import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:pinepods_mobile/services/pinepods/login_service.dart'; import 'package:pinepods_mobile/ui/widgets/restart_widget.dart'; +import 'package:pinepods_mobile/ui/settings/settings_section_label.dart'; import 'package:provider/provider.dart'; import 'package:http/http.dart' as http; import 'dart:convert'; @@ -20,11 +21,17 @@ class _PinepodsLoginWidgetState extends State { final _serverController = TextEditingController(); final _usernameController = TextEditingController(); final _passwordController = TextEditingController(); + final _mfaController = TextEditingController(); bool _isLoading = false; + bool _showMfaField = false; String _errorMessage = ''; bool _isLoggedIn = false; String? _connectedServer; + String? _tempServerUrl; + String? _tempUsername; + int? _tempUserId; + String? 
_tempMfaSessionToken; @override void initState() { @@ -53,116 +60,103 @@ class _PinepodsLoginWidgetState extends State { }); } - Future _verifyPinepodsInstance(String serverUrl) async { - // Normalize the URL by removing trailing slashes - final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); - final url = Uri.parse('$normalizedUrl/api/pinepods_check'); - - try { - final response = await http.get(url); - - if (response.statusCode == 200) { - final data = jsonDecode(response.body); - return data['pinepods_instance'] == true; - } - return false; - } catch (e) { - print('Error verifying PinePods instance: $e'); - return false; - } - } - - Future _login(String serverUrl, String username, String password) async { - // Normalize the URL by removing trailing slashes - final normalizedUrl = serverUrl.trim().replaceAll(RegExp(r'/$'), ''); - - // Create Basic Auth header - final credentials = base64Encode(utf8.encode('$username:$password')); - final authHeader = 'Basic $credentials'; - - final url = Uri.parse('$normalizedUrl/api/data/get_key'); - - try { - final response = await http.get( - url, - headers: {'Authorization': authHeader}, - ); - - if (response.statusCode == 200) { - final data = jsonDecode(response.body); - return data['retrieved_key']; - } - return null; - } catch (e) { - print('Login error: $e'); - return null; - } - } - - Future _verifyApiKey(String serverUrl, String apiKey) async { - final url = Uri.parse('$serverUrl/api/data/verify_key'); - - try { - final response = await http.get( - url, - headers: {'Api-Key': apiKey}, - ); - - if (response.statusCode == 200) { - final data = jsonDecode(response.body); - return data['status'] == 'success'; - } - return false; - } catch (e) { - print('Error verifying API key: $e'); - return false; - } - } - Future _connectToPinepods() async { - if (_serverController.text.isEmpty || + if (!_showMfaField && (_serverController.text.isEmpty || _usernameController.text.isEmpty || - 
_passwordController.text.isEmpty) { + _passwordController.text.isEmpty)) { setState(() { _errorMessage = 'Please fill in all fields'; }); return; } + if (_showMfaField && _mfaController.text.isEmpty) { + setState(() { + _errorMessage = 'Please enter your MFA code'; + }); + return; + } + setState(() { _isLoading = true; _errorMessage = ''; }); try { - final serverUrl = _serverController.text.trim(); - final username = _usernameController.text.trim(); - final password = _passwordController.text; + if (_showMfaField && _tempMfaSessionToken != null) { + // Complete MFA login flow + final mfaCode = _mfaController.text.trim(); + final result = await PinepodsLoginService.completeMfaLogin( + serverUrl: _tempServerUrl!, + username: _tempUsername!, + mfaSessionToken: _tempMfaSessionToken!, + mfaCode: mfaCode, + ); + + if (result.isSuccess) { + // Save the connection details including user ID + var settingsBloc = Provider.of(context, listen: false); + settingsBloc.setPinepodsServer(result.serverUrl!); + settingsBloc.setPinepodsApiKey(result.apiKey!); + settingsBloc.setPinepodsUserId(result.userId!); - // Use the same login service as the startup login - final result = await PinepodsLoginService.login( - serverUrl, - username, - password, - ); + setState(() { + _isLoggedIn = true; + _connectedServer = _tempServerUrl; + _showMfaField = false; + _tempServerUrl = null; + _tempUsername = null; + _tempUserId = null; + _tempMfaSessionToken = null; + _isLoading = false; + }); + } else { + setState(() { + _errorMessage = result.errorMessage ?? 
'MFA verification failed'; + _isLoading = false; + }); + } + } else { + // Initial login flow + final serverUrl = _serverController.text.trim(); + final username = _usernameController.text.trim(); + final password = _passwordController.text; - if (result.isSuccess) { - // Save the connection details including user ID - var settingsBloc = Provider.of(context, listen: false); - settingsBloc.setPinepodsServer(result.serverUrl!); - settingsBloc.setPinepodsApiKey(result.apiKey!); - settingsBloc.setPinepodsUserId(result.userId!); + final result = await PinepodsLoginService.login( + serverUrl, + username, + password, + ); - setState(() { - _isLoggedIn = true; - _connectedServer = serverUrl; - _isLoading = false; - }); - } else { - setState(() { - _errorMessage = result.errorMessage ?? 'Login failed'; - _isLoading = false; - }); + if (result.isSuccess) { + // Save the connection details including user ID + var settingsBloc = Provider.of(context, listen: false); + settingsBloc.setPinepodsServer(result.serverUrl!); + settingsBloc.setPinepodsApiKey(result.apiKey!); + settingsBloc.setPinepodsUserId(result.userId!); + + setState(() { + _isLoggedIn = true; + _connectedServer = serverUrl; + _isLoading = false; + }); + } else if (result.requiresMfa) { + // Store MFA session info and show MFA field + setState(() { + _tempServerUrl = result.serverUrl; + _tempUsername = result.username; + _tempUserId = result.userId; + _tempMfaSessionToken = result.mfaSessionToken; + _showMfaField = true; + _isLoading = false; + _errorMessage = 'Please enter your MFA code'; + }); + } else { + setState(() { + _errorMessage = result.errorMessage ?? 
'Login failed'; + _isLoading = false; + }); + } } } catch (e) { setState(() { @@ -172,6 +166,18 @@ class _PinepodsLoginWidgetState extends State { } } + void _resetMfa() { + setState(() { + _showMfaField = false; + _tempServerUrl = null; + _tempUsername = null; + _tempUserId = null; + _tempMfaSessionToken = null; + _mfaController.clear(); + _errorMessage = ''; + }); + } + void _logOut() async { var settingsBloc = Provider.of(context, listen: false); @@ -203,16 +209,7 @@ class _PinepodsLoginWidgetState extends State { return Column( crossAxisAlignment: CrossAxisAlignment.start, children: [ - const Padding( - padding: EdgeInsets.only(left: 16.0, top: 16.0, bottom: 8.0), - child: Text( - 'PinePods Server', - style: TextStyle( - fontSize: 14.0, - fontWeight: FontWeight.bold, - ), - ), - ), + SettingsDividerLabel(label: 'PinePods Server'), const Divider(), if (_isLoggedIn) ...[ // Show connected status @@ -252,7 +249,26 @@ class _PinepodsLoginWidgetState extends State { labelText: 'Password', ), obscureText: true, + enabled: !_showMfaField, ), + // MFA Field (shown when MFA is required) + if (_showMfaField) ...[ + const SizedBox(height: 16), + TextField( + controller: _mfaController, + decoration: InputDecoration( + labelText: 'MFA Code', + hintText: 'Enter 6-digit code', + suffixIcon: IconButton( + icon: const Icon(Icons.close), + onPressed: _resetMfa, + tooltip: 'Cancel MFA', + ), + ), + keyboardType: TextInputType.number, + maxLength: 6, + ), + ], if (_errorMessage.isNotEmpty) ...[ const SizedBox(height: 16), Text( @@ -273,7 +289,7 @@ class _PinepodsLoginWidgetState extends State { strokeWidth: 2, ), ) - : const Text('Connect'), + : Text(_showMfaField ? 
'Verify MFA Code' : 'Connect'), ), ), ], @@ -289,6 +305,7 @@ class _PinepodsLoginWidgetState extends State { _serverController.dispose(); _usernameController.dispose(); _passwordController.dispose(); + _mfaController.dispose(); super.dispose(); } } \ No newline at end of file diff --git a/mobile/lib/ui/settings/settings.dart b/mobile/lib/ui/settings/settings.dart index 7a34036e..9570e41d 100644 --- a/mobile/lib/ui/settings/settings.dart +++ b/mobile/lib/ui/settings/settings.dart @@ -13,6 +13,7 @@ import 'package:pinepods_mobile/ui/settings/settings_section_label.dart'; import 'package:pinepods_mobile/ui/settings/bottom_bar_order.dart'; import 'package:pinepods_mobile/ui/widgets/action_text.dart'; import 'package:pinepods_mobile/ui/settings/pinepods_login.dart'; +import 'package:pinepods_mobile/ui/debug/debug_logs_page.dart'; import 'package:pinepods_mobile/ui/themes.dart'; import 'package:file_picker/file_picker.dart'; import 'package:flutter/cupertino.dart'; @@ -56,53 +57,75 @@ class _SettingsState extends State { return ListView( children: [ SettingsDividerLabel(label: L.of(context)!.settings_personalisation_divider_label), + const Divider(), MergeSemantics( - child: ListTile( - shape: const RoundedRectangleBorder(side: BorderSide.none), - title: Text(L.of(context)!.settings_theme_switch_label), - subtitle: Text(ThemeRegistry.getTheme(snapshot.data!.theme).description), - trailing: DropdownButton( - value: snapshot.data!.theme, - icon: const Icon(Icons.palette), - underline: Container(), - items: ThemeRegistry.themeList.map((theme) { - return DropdownMenuItem( - value: theme.key, - child: Row( - mainAxisSize: MainAxisSize.min, - children: [ - Container( - width: 16, - height: 16, - decoration: BoxDecoration( - color: theme.isDark ? 
Colors.grey[800] : Colors.grey[200], - border: Border.all( - color: theme.themeData.colorScheme.primary, - width: 2, - ), - borderRadius: BorderRadius.circular(8), - ), - ), - const SizedBox(width: 8), - Flexible( - child: Text( - theme.name, - overflow: TextOverflow.ellipsis, - ), - ), - ], + child: Padding( + padding: const EdgeInsets.symmetric(horizontal: 16.0, vertical: 8.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Text( + L.of(context)!.settings_theme_switch_label, + style: Theme.of(context).textTheme.titleMedium, + ), + const SizedBox(height: 8), + Text( + ThemeRegistry.getTheme(snapshot.data!.theme).description, + style: Theme.of(context).textTheme.bodyMedium?.copyWith( + color: Theme.of(context).colorScheme.onSurface.withOpacity(0.7), ), - ); - }).toList(), - onChanged: (String? newTheme) { - if (newTheme != null) { - settingsBloc.setTheme(newTheme); - } - }, + ), + const SizedBox(height: 12), + Row( + children: [ + const Icon(Icons.palette, size: 20), + const SizedBox(width: 12), + Expanded( + child: DropdownButton( + value: snapshot.data!.theme, + isExpanded: true, + underline: Container(), + items: ThemeRegistry.themeList.map((theme) { + return DropdownMenuItem( + value: theme.key, + child: Row( + children: [ + Container( + width: 16, + height: 16, + decoration: BoxDecoration( + color: theme.isDark ? Colors.grey[800] : Colors.grey[200], + border: Border.all( + color: theme.themeData.colorScheme.primary, + width: 2, + ), + borderRadius: BorderRadius.circular(8), + ), + ), + const SizedBox(width: 8), + Expanded( + child: Text( + theme.name, + overflow: TextOverflow.ellipsis, + ), + ), + ], + ), + ); + }).toList(), + onChanged: (String? newTheme) { + if (newTheme != null) { + settingsBloc.setTheme(newTheme); + } + }, + ), + ), + ], + ), + ], ), ), ), - SettingsDividerLabel(label: L.of(context)!.settings_episodes_divider_label), sdcard ? 
MergeSemantics( child: ListTile( @@ -128,6 +151,7 @@ class _SettingsState extends State { width: 0, ), SettingsDividerLabel(label: 'Navigation'), + const Divider(), ListTile( title: const Text('Reorganize Bottom Bar'), subtitle: const Text('Customize the order of bottom navigation items'), @@ -142,6 +166,7 @@ class _SettingsState extends State { }, ), SettingsDividerLabel(label: L.of(context)!.settings_playback_divider_label), + const Divider(), MergeSemantics( child: ListTile( title: Text(L.of(context)!.settings_auto_open_now_playing), @@ -152,7 +177,23 @@ class _SettingsState extends State { ), ), const SearchProviderWidget(), + SettingsDividerLabel(label: 'Debug'), + const Divider(), + ListTile( + title: const Text('App Logs'), + subtitle: const Text('View debug logs and device information'), + leading: const Icon(Icons.bug_report), + onTap: () { + Navigator.push( + context, + MaterialPageRoute( + builder: (context) => const DebugLogsPage(), + ), + ); + }, + ), const PinepodsLoginWidget(), + const _WebAppInfoWidget(), ], ); }); @@ -241,3 +282,58 @@ class _SettingsState extends State { }); } } + +class _WebAppInfoWidget extends StatelessWidget { + const _WebAppInfoWidget(); + + @override + Widget build(BuildContext context) { + return Consumer( + builder: (context, settingsBloc, child) { + final settings = settingsBloc.currentSettings; + final serverUrl = settings.pinepodsServer; + + // Only show if user is connected to a server + if (serverUrl == null || serverUrl.isEmpty) { + return const SizedBox.shrink(); + } + + return Padding( + padding: const EdgeInsets.all(16.0), + child: Card( + child: Padding( + padding: const EdgeInsets.all(16.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Icon( + Icons.web, + color: Theme.of(context).primaryColor, + size: 20, + ), + const SizedBox(width: 8), + Text( + 'Web App Settings', + style: TextStyle( + fontWeight: FontWeight.bold, + color: 
Theme.of(context).primaryColor, + ), + ), + ], + ), + const SizedBox(height: 8), + Text( + 'Many more server side and user settings available from the PinePods web app. Please head to $serverUrl to adjust much more', + style: Theme.of(context).textTheme.bodySmall, + ), + ], + ), + ), + ), + ); + }, + ); + } +} diff --git a/mobile/lib/ui/themes.dart b/mobile/lib/ui/themes.dart index 01de2c79..6d5f3ec6 100644 --- a/mobile/lib/ui/themes.dart +++ b/mobile/lib/ui/themes.dart @@ -254,7 +254,7 @@ ThemeData _buildLightTheme() { error: Color(0xffd32f2f), onSurface: Color(0xfffb8c00), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xffffffff), ), cardTheme: const CardThemeData().copyWith( @@ -324,7 +324,7 @@ ThemeData _buildDarkTheme() { error: Color(0xffd32f2f), onSurface: Color(0xffffffff), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff222222), ), cardTheme: const CardThemeData().copyWith( @@ -401,7 +401,7 @@ ThemeData _buildNordTheme() { onSurface: Color(0xfff6f5f4), background: Color(0xff3C4252), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff2e3440), ), cardTheme: const CardThemeData().copyWith( @@ -483,7 +483,7 @@ ThemeData _buildDraculaTheme() { onSurface: Color(0xfff6f5f4), background: Color(0xff282A36), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff262626), ), cardTheme: const CardThemeData().copyWith( @@ -565,7 +565,7 @@ ThemeData _buildNordicTheme() { onSurface: Color(0xff656d76), background: Color(0xffeceff4), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xffe5e9f0), ), cardTheme: const 
CardThemeData().copyWith( @@ -647,7 +647,7 @@ ThemeData _buildGruvboxDarkTheme() { onSurface: Color(0xff868729), background: Color(0xff32302f), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff282828), ), cardTheme: const CardThemeData().copyWith( @@ -729,7 +729,7 @@ ThemeData _buildCatppuccinMochaTheme() { onSurface: Color(0xffcdd6f4), background: Color(0xff1e1e2e), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff11111b), ), cardTheme: const CardThemeData().copyWith( @@ -829,7 +829,7 @@ ThemeData _buildAbyssTheme() { onSurface: Color(0xfff6f5f4), background: Color(0xff000C18), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff051336), ), cardTheme: const CardThemeData().copyWith( @@ -911,7 +911,7 @@ ThemeData _buildCyberSynthwaveTheme() { onSurface: Color(0xffeee6ff), background: Color(0xff1a1721), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff2a1f3a), ), cardTheme: const CardThemeData().copyWith( @@ -993,7 +993,7 @@ ThemeData _buildMidnightOceanTheme() { onSurface: Color(0xffe2e8f0), background: Color(0xff0f172a), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff1e293b), ), cardTheme: const CardThemeData().copyWith( @@ -1075,7 +1075,7 @@ ThemeData _buildForestDepthsTheme() { onSurface: Color(0xffc9e4ca), background: Color(0xff1a2f1f), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff2d4a33), ), cardTheme: const CardThemeData().copyWith( @@ -1157,7 +1157,7 @@ ThemeData _buildSunsetHorizonTheme() { 
onSurface: Color(0xffffd9c0), background: Color(0xff2b1c2c), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff432e44), ), cardTheme: const CardThemeData().copyWith( @@ -1239,7 +1239,7 @@ ThemeData _buildArcticFrostTheme() { onSurface: Color(0xffeceff4), background: Color(0xff1a1d21), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff2a2f36), ), cardTheme: const CardThemeData().copyWith( @@ -1321,7 +1321,7 @@ ThemeData _buildNeonTheme() { onSurface: Color(0xff9F9DA1), background: Color(0xff120e16), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff120e16), ), cardTheme: const CardThemeData().copyWith( @@ -1403,7 +1403,7 @@ ThemeData _buildKimbieTheme() { onSurface: Color(0xffB1AD86), background: Color(0xff221a0f), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff131510), ), cardTheme: const CardThemeData().copyWith( @@ -1485,7 +1485,7 @@ ThemeData _buildGruvboxLightTheme() { onSurface: Color(0xff5f5750), background: Color(0xfff9f5d7), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfffbf1c7), ), cardTheme: const CardThemeData().copyWith( @@ -1567,7 +1567,7 @@ ThemeData _buildGreenMeanieTheme() { onSurface: Color(0xff489D50), background: Color(0xff142e28), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff292A2E), ), cardTheme: const CardThemeData().copyWith( @@ -1649,7 +1649,7 @@ ThemeData _buildWildberriesTheme() { onSurface: Color(0xffCF8B3E), background: Color(0xff240041), ), - bottomAppBarTheme: const 
BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xff19002E), ), cardTheme: const CardThemeData().copyWith( @@ -1731,7 +1731,7 @@ ThemeData _buildSoftLavenderTheme() { onSurface: Color(0xff3e2851), background: Color(0xfff5f2ff), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfff8f5ff), ), cardTheme: const CardThemeData().copyWith( @@ -1813,7 +1813,7 @@ ThemeData _buildMintyFreshTheme() { onSurface: Color(0xff134e4a), background: Color(0xffecfdf5), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfff0fdfa), ), cardTheme: const CardThemeData().copyWith( @@ -1895,7 +1895,7 @@ ThemeData _buildWarmVanillaTheme() { onSurface: Color(0xff78350f), background: Color(0xfffef3c7), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfffffbeb), ), cardTheme: const CardThemeData().copyWith( @@ -1977,7 +1977,7 @@ ThemeData _buildCoastalBlueTheme() { onSurface: Color(0xff0c4a6e), background: Color(0xffe0f2fe), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfff0f9ff), ), cardTheme: const CardThemeData().copyWith( @@ -2059,7 +2059,7 @@ ThemeData _buildPaperCreamTheme() { onSurface: Color(0xff4a3728), background: Color(0xfff5f2ef), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xfff9f7f4), ), cardTheme: const CardThemeData().copyWith( @@ -2141,7 +2141,7 @@ ThemeData _buildGithubLightTheme() { onSurface: Color(0xff1f2328), background: Color(0xfff6f8fa), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const 
BottomAppBarThemeData().copyWith( color: const Color(0xffffffff), ), cardTheme: const CardThemeData().copyWith( @@ -2223,7 +2223,7 @@ ThemeData _buildHotDogStandTheme() { onSurface: Color(0xff000000), background: Color(0xffffff00), ), - bottomAppBarTheme: const BottomAppBarTheme().copyWith( + bottomAppBarTheme: const BottomAppBarThemeData().copyWith( color: const Color(0xffffff00), ), cardTheme: const CardThemeData().copyWith( diff --git a/mobile/lib/ui/utils/local_download_utils.dart b/mobile/lib/ui/utils/local_download_utils.dart new file mode 100644 index 00000000..ef20e325 --- /dev/null +++ b/mobile/lib/ui/utils/local_download_utils.dart @@ -0,0 +1,225 @@ +// lib/ui/utils/local_download_utils.dart +import 'package:flutter/material.dart'; +import 'package:pinepods_mobile/entities/pinepods_episode.dart'; +import 'package:pinepods_mobile/entities/episode.dart'; +import 'package:pinepods_mobile/entities/downloadable.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; +import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; +import 'package:provider/provider.dart'; + +/// Utility class for managing local downloads of PinePods episodes +class LocalDownloadUtils { + static final Map _localDownloadStatusCache = {}; + + /// Generate consistent GUID for PinePods episodes for local downloads + static String generateEpisodeGuid(PinepodsEpisode episode) { + return 'pinepods_${episode.episodeId}'; + } + + /// Clear the local download status cache (call on refresh) + static void clearCache() { + _localDownloadStatusCache.clear(); + } + + /// Check if episode is downloaded locally with caching + static Future isEpisodeDownloadedLocally( + BuildContext context, + PinepodsEpisode episode + ) async { + final guid = generateEpisodeGuid(episode); + final logger = AppLogger(); + logger.debug('LocalDownload', 'Checking download status for episode: ${episode.episodeTitle}, GUID: $guid'); + + // Check cache first + if 
(_localDownloadStatusCache.containsKey(guid)) { + logger.debug('LocalDownload', 'Found cached status for $guid: ${_localDownloadStatusCache[guid]}'); + return _localDownloadStatusCache[guid]!; + } + + try { + final podcastBloc = Provider.of(context, listen: false); + + // Get all episodes and find matches with both new and old GUID formats + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + logger.debug('LocalDownload', 'Repository lookup for $guid: found ${matchingEpisodes.length} matching episodes'); + + // Found matching episodes + + // Consider downloaded if ANY matching episode is downloaded + final isDownloaded = matchingEpisodes.any((ep) => + ep.downloaded || ep.downloadState == DownloadState.downloaded + ); + + logger.debug('LocalDownload', 'Final download status for $guid: $isDownloaded'); + + // Cache the result + _localDownloadStatusCache[guid] = isDownloaded; + return isDownloaded; + } catch (e) { + final logger = AppLogger(); + logger.error('LocalDownload', 'Error checking local download status for episode: ${episode.episodeTitle}', e.toString()); + return false; + } + } + + /// Update local download status cache + static void updateLocalDownloadStatus(PinepodsEpisode episode, bool isDownloaded) { + final guid = generateEpisodeGuid(episode); + _localDownloadStatusCache[guid] = isDownloaded; + } + + /// Proactively load local download status for a list of episodes + static Future loadLocalDownloadStatuses( + BuildContext context, + List episodes + ) async { + final logger = AppLogger(); + logger.debug('LocalDownload', 'Loading local download statuses for ${episodes.length} episodes'); + + try { + final podcastBloc = Provider.of(context, listen: false); + + // Get all downloaded episodes from repository + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + 
logger.debug('LocalDownload', 'Found ${allEpisodes.length} total episodes in repository'); + + // Filter to PinePods episodes only and log them + final pinepodsEpisodes = allEpisodes.where((ep) => ep.guid.startsWith('pinepods_')).toList(); + logger.debug('LocalDownload', 'Found ${pinepodsEpisodes.length} PinePods episodes in repository'); + + // Found pinepods episodes in repository + + // Now check each episode against the repository + for (final episode in episodes) { + final guid = generateEpisodeGuid(episode); + + // Look for episodes with either new format (pinepods_123) or old format (pinepods_123_timestamp) + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + // Checking for matching episodes + + // Consider downloaded if ANY matching episode is downloaded + final isDownloaded = matchingEpisodes.any((ep) => + ep.downloaded || ep.downloadState == DownloadState.downloaded + ); + + _localDownloadStatusCache[guid] = isDownloaded; + // Episode status checked + } + + // Download statuses cached + + } catch (e) { + logger.error('LocalDownload', 'Error loading local download statuses', e.toString()); + } + } + + /// Download episode locally + static Future localDownloadEpisode( + BuildContext context, + PinepodsEpisode episode + ) async { + final logger = AppLogger(); + + try { + // Convert PinepodsEpisode to Episode for local download + final localEpisode = Episode( + guid: generateEpisodeGuid(episode), + pguid: 'pinepods_${episode.podcastName.replaceAll(' ', '_').toLowerCase()}', + podcast: episode.podcastName, + title: episode.episodeTitle, + description: episode.episodeDescription, + imageUrl: episode.episodeArtwork, + contentUrl: episode.episodeUrl, + duration: episode.episodeDuration, + publicationDate: DateTime.tryParse(episode.episodePubDate), + author: episode.podcastName, + season: 0, + episode: 0, + position: episode.listenDuration ?? 
0, + played: episode.completed, + chapters: [], + transcriptUrls: [], + ); + + logger.debug('LocalDownload', 'Created local episode with GUID: ${localEpisode.guid}'); + logger.debug('LocalDownload', 'Episode title: ${localEpisode.title}'); + logger.debug('LocalDownload', 'Episode URL: ${localEpisode.contentUrl}'); + + final podcastBloc = Provider.of(context, listen: false); + + // First save the episode to the repository so it can be tracked + await podcastBloc.podcastService.saveEpisode(localEpisode); + logger.debug('LocalDownload', 'Episode saved to repository'); + + // Use the download service from podcast bloc + final success = await podcastBloc.downloadService.downloadEpisode(localEpisode); + logger.debug('LocalDownload', 'Download service result: $success'); + + if (success) { + updateLocalDownloadStatus(episode, true); + } + + return success; + } catch (e) { + logger.error('LocalDownload', 'Error in local download for episode: ${episode.episodeTitle}', e.toString()); + return false; + } + } + + /// Delete local download(s) for episode + static Future deleteLocalDownload( + BuildContext context, + PinepodsEpisode episode + ) async { + final logger = AppLogger(); + + try { + final podcastBloc = Provider.of(context, listen: false); + final guid = generateEpisodeGuid(episode); + + // Get all episodes and find matches with both new and old GUID formats + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + logger.debug('LocalDownload', 'Found ${matchingEpisodes.length} episodes to delete for $guid'); + + if (matchingEpisodes.isNotEmpty) { + // Delete ALL matching episodes (handles duplicates from old timestamp GUIDs) + for (final localEpisode in matchingEpisodes) { + logger.debug('LocalDownload', 'Deleting episode: ${localEpisode.guid}'); + await podcastBloc.podcastService.repository.deleteEpisode(localEpisode); 
+ } + + // Update cache + updateLocalDownloadStatus(episode, false); + + return matchingEpisodes.length; + } else { + return 0; + } + } catch (e) { + logger.error('LocalDownload', 'Error deleting local download for episode: ${episode.episodeTitle}', e.toString()); + return 0; + } + } + + /// Show snackbar with message + static void showSnackBar(BuildContext context, String message, Color backgroundColor) { + ScaffoldMessenger.of(context).showSnackBar( + SnackBar( + content: Text(message), + backgroundColor: backgroundColor, + duration: const Duration(seconds: 2), + ), + ); + } +} \ No newline at end of file diff --git a/mobile/lib/ui/utils/player_utils.dart b/mobile/lib/ui/utils/player_utils.dart index 1cbfd92c..436dde45 100644 --- a/mobile/lib/ui/utils/player_utils.dart +++ b/mobile/lib/ui/utils/player_utils.dart @@ -5,6 +5,10 @@ import 'package:flutter/material.dart'; import 'package:pinepods_mobile/entities/app_settings.dart'; import 'package:pinepods_mobile/ui/podcast/now_playing.dart'; +import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; +import 'package:pinepods_mobile/entities/pinepods_episode.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_audio_service.dart'; +import 'package:provider/provider.dart'; /// If we have the 'show now playing upon play' option set to true, launch /// the [NowPlaying] widget automatically. 
@@ -19,4 +23,21 @@ void optionalShowNowPlaying(BuildContext context, AppSettings settings) { ), ); } +} + +/// Helper function to play a PinePods episode and automatically show the full screen player if enabled +Future playPinepodsEpisodeWithOptionalFullScreen( + BuildContext context, + PinepodsAudioService audioService, + PinepodsEpisode episode, { + bool resume = true, +}) async { + await audioService.playPinepodsEpisode( + pinepodsEpisode: episode, + resume: resume, + ); + + // Show full screen player if setting is enabled + final settingsBloc = Provider.of(context, listen: false); + optionalShowNowPlaying(context, settingsBloc.currentSettings); } \ No newline at end of file diff --git a/mobile/lib/ui/utils/position_utils.dart b/mobile/lib/ui/utils/position_utils.dart new file mode 100644 index 00000000..a90cbca9 --- /dev/null +++ b/mobile/lib/ui/utils/position_utils.dart @@ -0,0 +1,151 @@ +// lib/ui/utils/position_utils.dart +import 'package:flutter/material.dart'; +import 'package:pinepods_mobile/entities/pinepods_episode.dart'; +import 'package:pinepods_mobile/entities/episode.dart'; +import 'package:pinepods_mobile/entities/downloadable.dart'; +import 'package:pinepods_mobile/services/logging/app_logger.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; +import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; +import 'package:provider/provider.dart'; + +/// Utility class for managing episode position synchronization and display +class PositionUtils { + static final AppLogger _logger = AppLogger(); + + /// Generate consistent GUID for PinePods episodes + static String generateEpisodeGuid(PinepodsEpisode episode) { + return 'pinepods_${episode.episodeId}'; + } + + /// Get local position for episode from repository + static Future getLocalPosition(BuildContext context, PinepodsEpisode episode) async { + try { + final podcastBloc = Provider.of(context, listen: false); + final guid = generateEpisodeGuid(episode); + + // Get 
all episodes and find matches with both new and old GUID formats + final allEpisodes = await podcastBloc.podcastService.repository.findAllEpisodes(); + final matchingEpisodes = allEpisodes.where((ep) => + ep.guid == guid || ep.guid.startsWith('${guid}_') + ).toList(); + + if (matchingEpisodes.isNotEmpty) { + // Return the highest position from any matching episode (in case of duplicates) + final positions = matchingEpisodes.map((ep) => ep.position / 1000.0).toList(); + return positions.reduce((a, b) => a > b ? a : b); + } + + return null; + } catch (e) { + _logger.error('PositionUtils', 'Error getting local position for episode: ${episode.episodeTitle}', e.toString()); + return null; + } + } + + /// Get server position for episode (use existing data from feed) + static Future getServerPosition(PinepodsService pinepodsService, PinepodsEpisode episode, int userId) async { + return episode.listenDuration?.toDouble(); + } + + /// Get the best available position (furthest of local vs server) + static Future getBestPosition( + BuildContext context, + PinepodsService pinepodsService, + PinepodsEpisode episode, + int userId, + ) async { + // Get both positions in parallel + final futures = await Future.wait([ + getLocalPosition(context, episode), + getServerPosition(pinepodsService, episode, userId), + ]); + + final localPosition = futures[0] ?? 0.0; + final serverPosition = futures[1] ?? episode.listenDuration?.toDouble() ?? 0.0; + + final bestPosition = localPosition > serverPosition ? 
localPosition : serverPosition; + final isLocal = localPosition >= serverPosition; + + + return PositionInfo( + position: bestPosition, + isLocal: isLocal, + localPosition: localPosition, + serverPosition: serverPosition, + ); + } + + /// Enrich a single episode with the best available position + static Future enrichEpisodeWithBestPosition( + BuildContext context, + PinepodsService pinepodsService, + PinepodsEpisode episode, + int userId, + ) async { + final positionInfo = await getBestPosition(context, pinepodsService, episode, userId); + + // Create a new episode with updated position + return PinepodsEpisode( + podcastName: episode.podcastName, + episodeTitle: episode.episodeTitle, + episodePubDate: episode.episodePubDate, + episodeDescription: episode.episodeDescription, + episodeArtwork: episode.episodeArtwork, + episodeUrl: episode.episodeUrl, + episodeDuration: episode.episodeDuration, + listenDuration: positionInfo.position.round(), + episodeId: episode.episodeId, + completed: episode.completed, + saved: episode.saved, + queued: episode.queued, + downloaded: episode.downloaded, + isYoutube: episode.isYoutube, + podcastId: episode.podcastId, + ); + } + + /// Enrich a list of episodes with the best available positions + static Future> enrichEpisodesWithBestPositions( + BuildContext context, + PinepodsService pinepodsService, + List episodes, + int userId, + ) async { + _logger.info('PositionUtils', 'Enriching ${episodes.length} episodes with best positions'); + + final enrichedEpisodes = []; + + for (final episode in episodes) { + try { + final enrichedEpisode = await enrichEpisodeWithBestPosition( + context, + pinepodsService, + episode, + userId, + ); + enrichedEpisodes.add(enrichedEpisode); + } catch (e) { + _logger.warning('PositionUtils', 'Failed to enrich episode ${episode.episodeTitle}, using original: ${e.toString()}'); + enrichedEpisodes.add(episode); + } + } + + _logger.info('PositionUtils', 'Successfully enriched ${enrichedEpisodes.length} 
episodes'); + return enrichedEpisodes; + } +} + +/// Information about episode position +class PositionInfo { + final double position; + final bool isLocal; + final double localPosition; + final double serverPosition; + + PositionInfo({ + required this.position, + required this.isLocal, + required this.localPosition, + required this.serverPosition, + }); +} \ No newline at end of file diff --git a/mobile/lib/ui/widgets/episode_context_menu.dart b/mobile/lib/ui/widgets/episode_context_menu.dart index f10e21f0..ce20b9bd 100644 --- a/mobile/lib/ui/widgets/episode_context_menu.dart +++ b/mobile/lib/ui/widgets/episode_context_menu.dart @@ -8,9 +8,11 @@ class EpisodeContextMenu extends StatelessWidget { final VoidCallback? onRemoveSaved; final VoidCallback? onDownload; final VoidCallback? onLocalDownload; + final VoidCallback? onDeleteLocalDownload; final VoidCallback? onQueue; final VoidCallback? onMarkComplete; final VoidCallback? onDismiss; + final bool isDownloadedLocally; const EpisodeContextMenu({ Key? key, @@ -19,9 +21,11 @@ class EpisodeContextMenu extends StatelessWidget { this.onRemoveSaved, this.onDownload, this.onLocalDownload, + this.onDeleteLocalDownload, this.onQueue, this.onMarkComplete, this.onDismiss, + this.isDownloadedLocally = false, }) : super(key: key); @override @@ -90,9 +94,9 @@ class EpisodeContextMenu extends StatelessWidget { _buildMenuOption( context, - icon: Icons.file_download_outlined, - text: 'Download Locally', - onTap: onLocalDownload, + icon: isDownloadedLocally ? Icons.delete_forever_outlined : Icons.file_download_outlined, + text: isDownloadedLocally ? 'Delete Local Download' : 'Download Locally', + onTap: isDownloadedLocally ? 
onDeleteLocalDownload : onLocalDownload, ), _buildMenuOption( diff --git a/mobile/lib/ui/widgets/offline_episode_tile.dart b/mobile/lib/ui/widgets/offline_episode_tile.dart new file mode 100644 index 00000000..d0d43a7a --- /dev/null +++ b/mobile/lib/ui/widgets/offline_episode_tile.dart @@ -0,0 +1,162 @@ +// lib/ui/widgets/offline_episode_tile.dart +import 'package:flutter/material.dart'; +import 'package:pinepods_mobile/entities/episode.dart'; +import 'package:pinepods_mobile/ui/widgets/tile_image.dart'; +import 'package:pinepods_mobile/l10n/L.dart'; +import 'package:intl/intl.dart' show DateFormat; + +/// A custom episode tile specifically for offline downloaded episodes. +/// This bypasses the legacy PlayControl system and uses a custom play callback. +class OfflineEpisodeTile extends StatelessWidget { + final Episode episode; + final VoidCallback? onPlayPressed; + final VoidCallback? onTap; + + const OfflineEpisodeTile({ + super.key, + required this.episode, + this.onPlayPressed, + this.onTap, + }); + + @override + Widget build(BuildContext context) { + final textTheme = Theme.of(context).textTheme; + + return Card( + margin: const EdgeInsets.symmetric(horizontal: 12.0, vertical: 4.0), + child: ListTile( + onTap: onTap, + leading: Stack( + alignment: Alignment.bottomLeft, + children: [ + Opacity( + opacity: episode.played ? 0.5 : 1.0, + child: TileImage( + url: episode.thumbImageUrl ?? episode.imageUrl!, + size: 56.0, + highlight: episode.highlight, + ), + ), + // Progress indicator + SizedBox( + height: 5.0, + width: 56.0 * (episode.percentagePlayed / 100), + child: Container( + color: Theme.of(context).primaryColor, + ), + ), + ], + ), + title: Opacity( + opacity: episode.played ? 0.5 : 1.0, + child: Text( + episode.title!, + overflow: TextOverflow.ellipsis, + maxLines: 2, + style: textTheme.bodyMedium, + ), + ), + subtitle: Opacity( + opacity: episode.played ? 
0.5 : 1.0, + child: _EpisodeSubtitle(episode), + ), + trailing: Row( + mainAxisSize: MainAxisSize.min, + children: [ + // Offline indicator + Container( + padding: const EdgeInsets.symmetric(horizontal: 6, vertical: 2), + decoration: BoxDecoration( + color: Colors.green[100], + borderRadius: BorderRadius.circular(8), + ), + child: Row( + mainAxisSize: MainAxisSize.min, + children: [ + Icon( + Icons.offline_pin, + size: 12, + color: Colors.green[700], + ), + const SizedBox(width: 4), + Text( + 'Offline', + style: TextStyle( + fontSize: 10, + color: Colors.green[700], + fontWeight: FontWeight.w500, + ), + ), + ], + ), + ), + const SizedBox(width: 8), + // Custom play button that bypasses legacy audio system + SizedBox( + width: 48, + height: 48, + child: IconButton( + onPressed: onPlayPressed, + icon: Icon( + Icons.play_arrow, + color: Theme.of(context).primaryColor, + ), + tooltip: L.of(context)?.play_button_label ?? 'Play', + ), + ), + ], + ), + ), + ); + } +} + +class _EpisodeSubtitle extends StatelessWidget { + final Episode episode; + final String date; + final Duration length; + + _EpisodeSubtitle(this.episode) + : date = episode.publicationDate == null + ? '' + : DateFormat(episode.publicationDate!.year == DateTime.now().year ? 
'd MMM' : 'd MMM yyyy') + .format(episode.publicationDate!), + length = Duration(seconds: episode.duration); + + @override + Widget build(BuildContext context) { + final textTheme = Theme.of(context).textTheme; + var timeRemaining = episode.timeRemaining; + + String title; + + if (length.inSeconds > 0) { + if (length.inSeconds < 60) { + title = '$date • ${length.inSeconds} sec'; + } else { + title = '$date • ${length.inMinutes} min'; + } + } else { + title = date; + } + + if (timeRemaining.inSeconds > 0) { + if (timeRemaining.inSeconds < 60) { + title = '$title / ${timeRemaining.inSeconds} sec left'; + } else { + title = '$title / ${timeRemaining.inMinutes} min left'; + } + } + + return Padding( + padding: const EdgeInsets.only(top: 4.0), + child: Text( + title, + overflow: TextOverflow.ellipsis, + softWrap: false, + style: textTheme.bodySmall, + ), + ); + } +} \ No newline at end of file diff --git a/mobile/lib/ui/widgets/paginated_episode_list.dart b/mobile/lib/ui/widgets/paginated_episode_list.dart index 420cec08..6affd305 100644 --- a/mobile/lib/ui/widgets/paginated_episode_list.dart +++ b/mobile/lib/ui/widgets/paginated_episode_list.dart @@ -4,25 +4,26 @@ import 'package:pinepods_mobile/entities/pinepods_episode.dart'; import 'package:pinepods_mobile/entities/episode.dart'; import 'package:pinepods_mobile/ui/widgets/pinepods_episode_card.dart'; import 'package:pinepods_mobile/ui/widgets/episode_tile.dart'; +import 'package:pinepods_mobile/ui/widgets/offline_episode_tile.dart'; import 'package:pinepods_mobile/ui/widgets/shimmer_episode_tile.dart'; class PaginatedEpisodeList extends StatefulWidget { final List episodes; // Can be PinepodsEpisode or Episode final bool isServerEpisodes; + final bool isOfflineMode; // New flag for offline mode final Function(dynamic episode)? onEpisodeTap; final Function(dynamic episode, int globalIndex)? onEpisodeLongPress; final Function(dynamic episode)? onPlayPressed; - final Function(dynamic episode)? 
onDownloadPressed; final int pageSize; const PaginatedEpisodeList({ super.key, required this.episodes, required this.isServerEpisodes, + this.isOfflineMode = false, this.onEpisodeTap, this.onEpisodeLongPress, this.onPlayPressed, - this.onDownloadPressed, this.pageSize = 20, // Show 20 episodes at a time }); @@ -70,16 +71,26 @@ class _PaginatedEpisodeListState extends State { onPlayPressed: widget.onPlayPressed != null ? () => widget.onPlayPressed!(episode) : null, - onDownloadPressed: widget.onDownloadPressed != null - ? () => widget.onDownloadPressed!(episode) - : null, ); } else if (!widget.isServerEpisodes && episode is Episode) { - return EpisodeTile( - episode: episode, - download: false, - play: true, - ); + // Use offline episode tile when in offline mode to bypass legacy audio system + if (widget.isOfflineMode) { + return OfflineEpisodeTile( + episode: episode, + onTap: widget.onEpisodeTap != null + ? () => widget.onEpisodeTap!(episode) + : null, + onPlayPressed: widget.onPlayPressed != null + ? () => widget.onPlayPressed!(episode) + : null, + ); + } else { + return EpisodeTile( + episode: episode, + download: false, + play: true, + ); + } } return const SizedBox.shrink(); // Fallback diff --git a/mobile/lib/ui/widgets/pinepods_episode_card.dart b/mobile/lib/ui/widgets/pinepods_episode_card.dart index c23ff608..46611ed5 100644 --- a/mobile/lib/ui/widgets/pinepods_episode_card.dart +++ b/mobile/lib/ui/widgets/pinepods_episode_card.dart @@ -8,7 +8,6 @@ class PinepodsEpisodeCard extends StatelessWidget { final VoidCallback? onTap; final VoidCallback? onLongPress; final VoidCallback? onPlayPressed; - final VoidCallback? onDownloadPressed; const PinepodsEpisodeCard({ Key? 
key, @@ -16,7 +15,6 @@ class PinepodsEpisodeCard extends StatelessWidget { this.onTap, this.onLongPress, this.onPlayPressed, - this.onDownloadPressed, }) : super(key: key); @override @@ -129,25 +127,6 @@ class PinepodsEpisodeCard extends StatelessWidget { ), ), - // Download button - if (onDownloadPressed != null) - IconButton( - onPressed: onDownloadPressed, - icon: Icon( - episode.downloaded - ? Icons.download_done - : Icons.download_outlined, - color: episode.downloaded - ? Colors.green - : Colors.grey[600], - size: 20, - ), - padding: EdgeInsets.zero, - constraints: const BoxConstraints( - minWidth: 24, - minHeight: 24, - ), - ), const SizedBox(height: 4), Row( mainAxisSize: MainAxisSize.min, diff --git a/mobile/lib/ui/widgets/podcast_grid_tile.dart b/mobile/lib/ui/widgets/podcast_grid_tile.dart index 49fcf656..eed15cf1 100644 --- a/mobile/lib/ui/widgets/podcast_grid_tile.dart +++ b/mobile/lib/ui/widgets/podcast_grid_tile.dart @@ -3,9 +3,13 @@ // found in the LICENSE file. import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; +import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/entities/podcast.dart'; +import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/ui/podcast/podcast_details.dart'; +import 'package:pinepods_mobile/ui/pinepods/podcast_details.dart'; import 'package:pinepods_mobile/ui/widgets/tile_image.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:flutter/material.dart'; import 'package:provider/provider.dart'; @@ -22,13 +26,8 @@ class PodcastGridTile extends StatelessWidget { final podcastBloc = Provider.of(context); return GestureDetector( - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - settings: const RouteSettings(name: 'podcastdetails'), - builder: (context) => PodcastDetails(podcast, podcastBloc)), - ); + onTap: () async { + await _navigateToPodcastDetails(context, podcastBloc); }, 
child: Semantics( label: podcast.title, @@ -45,6 +44,84 @@ class PodcastGridTile extends StatelessWidget { ), ); } + + Future _navigateToPodcastDetails(BuildContext context, PodcastBloc podcastBloc) async { + // Check if this is a PinePods setup and if the podcast is already subscribed + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + if (settings.pinepodsServer != null && + settings.pinepodsApiKey != null && + settings.pinepodsUserId != null) { + + // Check if podcast is already subscribed + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final isSubscribed = await pinepodsService.checkPodcastExists( + podcast.title, + podcast.url!, + settings.pinepodsUserId! + ); + + if (isSubscribed) { + // Get the internal PinePods database ID + final internalPodcastId = await pinepodsService.getPodcastId( + settings.pinepodsUserId!, + podcast.url!, + podcast.title + ); + + // Use PinePods podcast details for subscribed podcasts + final unifiedPodcast = UnifiedPinepodsPodcast( + id: internalPodcastId ?? 0, + indexId: 0, // Default for subscribed podcasts + title: podcast.title, + url: podcast.url ?? '', + originalUrl: podcast.url ?? '', + link: podcast.link ?? '', + description: podcast.description ?? '', + author: podcast.copyright ?? '', + ownerName: podcast.copyright ?? '', + image: podcast.imageUrl ?? '', + artwork: podcast.imageUrl ?? 
'', + lastUpdateTime: 0, + explicit: false, + episodeCount: 0, // Will be loaded + ); + + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'pinepodspodcastdetails'), + builder: (context) => PinepodsPodcastDetails( + podcast: unifiedPodcast, + isFollowing: true, + ), + ), + ); + } + return; + } + } catch (e) { + // If check fails, fall through to standard podcast details + print('Error checking subscription status: $e'); + } + } + + // Use standard podcast details for non-subscribed or non-PinePods setups + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'podcastdetails'), + builder: (context) => PodcastDetails(podcast, podcastBloc), + ), + ); + } + } } class PodcastTitledGridTile extends StatelessWidget { @@ -61,13 +138,8 @@ class PodcastTitledGridTile extends StatelessWidget { final theme = Theme.of(context); return GestureDetector( - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - settings: const RouteSettings(name: 'podcastdetails'), - builder: (context) => PodcastDetails(podcast, podcastBloc)), - ); + onTap: () async { + await _navigateToPodcastDetails(context, podcastBloc); }, child: GridTile( child: Hero( @@ -97,4 +169,82 @@ class PodcastTitledGridTile extends StatelessWidget { ), ); } + + Future _navigateToPodcastDetails(BuildContext context, PodcastBloc podcastBloc) async { + // Check if this is a PinePods setup and if the podcast is already subscribed + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + if (settings.pinepodsServer != null && + settings.pinepodsApiKey != null && + settings.pinepodsUserId != null) { + + // Check if podcast is already subscribed + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final isSubscribed = await 
pinepodsService.checkPodcastExists( + podcast.title, + podcast.url!, + settings.pinepodsUserId! + ); + + if (isSubscribed) { + // Get the internal PinePods database ID + final internalPodcastId = await pinepodsService.getPodcastId( + settings.pinepodsUserId!, + podcast.url!, + podcast.title + ); + + // Use PinePods podcast details for subscribed podcasts + final unifiedPodcast = UnifiedPinepodsPodcast( + id: internalPodcastId ?? 0, + indexId: 0, // Default for subscribed podcasts + title: podcast.title, + url: podcast.url ?? '', + originalUrl: podcast.url ?? '', + link: podcast.link ?? '', + description: podcast.description ?? '', + author: podcast.copyright ?? '', + ownerName: podcast.copyright ?? '', + image: podcast.imageUrl ?? '', + artwork: podcast.imageUrl ?? '', + lastUpdateTime: 0, + explicit: false, + episodeCount: 0, // Will be loaded + ); + + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'pinepodspodcastdetails'), + builder: (context) => PinepodsPodcastDetails( + podcast: unifiedPodcast, + isFollowing: true, + ), + ), + ); + } + return; + } + } catch (e) { + // If check fails, fall through to standard podcast details + print('Error checking subscription status: $e'); + } + } + + // Use standard podcast details for non-subscribed or non-PinePods setups + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'podcastdetails'), + builder: (context) => PodcastDetails(podcast, podcastBloc), + ), + ); + } + } } diff --git a/mobile/lib/ui/widgets/podcast_tile.dart b/mobile/lib/ui/widgets/podcast_tile.dart index ecd11d4f..e83a1e2b 100644 --- a/mobile/lib/ui/widgets/podcast_tile.dart +++ b/mobile/lib/ui/widgets/podcast_tile.dart @@ -3,9 +3,13 @@ // found in the LICENSE file. 
import 'package:pinepods_mobile/bloc/podcast/podcast_bloc.dart'; +import 'package:pinepods_mobile/bloc/settings/settings_bloc.dart'; import 'package:pinepods_mobile/entities/podcast.dart'; +import 'package:pinepods_mobile/entities/pinepods_search.dart'; import 'package:pinepods_mobile/ui/podcast/podcast_details.dart'; +import 'package:pinepods_mobile/ui/pinepods/podcast_details.dart'; import 'package:pinepods_mobile/ui/widgets/tile_image.dart'; +import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart'; import 'package:flutter/material.dart'; import 'package:provider/provider.dart'; @@ -22,13 +26,8 @@ class PodcastTile extends StatelessWidget { final podcastBloc = Provider.of(context); return ListTile( - onTap: () { - Navigator.push( - context, - MaterialPageRoute( - settings: const RouteSettings(name: 'podcastdetails'), - builder: (context) => PodcastDetails(podcast, podcastBloc)), - ); + onTap: () async { + await _navigateToPodcastDetails(context, podcastBloc); }, minVerticalPadding: 9, leading: ExcludeSemantics( @@ -56,4 +55,82 @@ class PodcastTile extends StatelessWidget { isThreeLine: false, ); } + + Future _navigateToPodcastDetails(BuildContext context, PodcastBloc podcastBloc) async { + // Check if this is a PinePods setup and if the podcast is already subscribed + final settingsBloc = Provider.of(context, listen: false); + final settings = settingsBloc.currentSettings; + + if (settings.pinepodsServer != null && + settings.pinepodsApiKey != null && + settings.pinepodsUserId != null) { + + // Check if podcast is already subscribed + final pinepodsService = PinepodsService(); + pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!); + + try { + final isSubscribed = await pinepodsService.checkPodcastExists( + podcast.title, + podcast.url!, + settings.pinepodsUserId! 
+ ); + + if (isSubscribed) { + // Get the internal PinePods database ID + final internalPodcastId = await pinepodsService.getPodcastId( + settings.pinepodsUserId!, + podcast.url!, + podcast.title + ); + + // Use PinePods podcast details for subscribed podcasts + final unifiedPodcast = UnifiedPinepodsPodcast( + id: internalPodcastId ?? 0, + indexId: 0, // Default for subscribed podcasts + title: podcast.title, + url: podcast.url ?? '', + originalUrl: podcast.url ?? '', + link: podcast.link ?? '', + description: podcast.description ?? '', + author: podcast.copyright ?? '', + ownerName: podcast.copyright ?? '', + image: podcast.imageUrl ?? '', + artwork: podcast.imageUrl ?? '', + lastUpdateTime: 0, + explicit: false, + episodeCount: 0, // Will be loaded + ); + + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'pinepodspodcastdetails'), + builder: (context) => PinepodsPodcastDetails( + podcast: unifiedPodcast, + isFollowing: true, + ), + ), + ); + } + return; + } + } catch (e) { + // If check fails, fall through to standard podcast details + print('Error checking subscription status: $e'); + } + } + + // Use standard podcast details for non-subscribed or non-PinePods setups + if (context.mounted) { + Navigator.push( + context, + MaterialPageRoute( + settings: const RouteSettings(name: 'podcastdetails'), + builder: (context) => PodcastDetails(podcast, podcastBloc), + ), + ); + } + } } diff --git a/mobile/lib/ui/widgets/server_error_page.dart b/mobile/lib/ui/widgets/server_error_page.dart new file mode 100644 index 00000000..1c8d12a0 --- /dev/null +++ b/mobile/lib/ui/widgets/server_error_page.dart @@ -0,0 +1,242 @@ +// lib/ui/widgets/server_error_page.dart +import 'package:flutter/material.dart'; + +class ServerErrorPage extends StatelessWidget { + final String? errorMessage; + final VoidCallback? onRetry; + final String? title; + final String? 
subtitle; + final bool showLogo; + + const ServerErrorPage({ + Key? key, + this.errorMessage, + this.onRetry, + this.title, + this.subtitle, + this.showLogo = true, + }) : super(key: key); + + @override + Widget build(BuildContext context) { + return Container( + padding: const EdgeInsets.symmetric(horizontal: 32.0, vertical: 48.0), + child: Column( + mainAxisAlignment: MainAxisAlignment.center, + crossAxisAlignment: CrossAxisAlignment.center, + children: [ + // PinePods Logo + if (showLogo) ...[ + ClipRRect( + borderRadius: BorderRadius.circular(16), + child: Image.asset( + 'assets/images/pinepods-logo.png', + width: 120, + height: 120, + fit: BoxFit.contain, + errorBuilder: (context, error, stackTrace) { + // Fallback if logo image fails to load + return Container( + width: 120, + height: 120, + decoration: BoxDecoration( + color: Theme.of(context).primaryColor.withOpacity(0.1), + borderRadius: BorderRadius.circular(16), + ), + child: Icon( + Icons.podcasts, + size: 64, + color: Theme.of(context).primaryColor, + ), + ); + }, + ), + ), + const SizedBox(height: 32), + ], + + // Error Icon + Container( + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.errorContainer.withOpacity(0.1), + shape: BoxShape.circle, + ), + child: Icon( + Icons.cloud_off_rounded, + size: 48, + color: Theme.of(context).colorScheme.error, + ), + ), + const SizedBox(height: 24), + + // Title + Text( + title ?? 'Server Unavailable', + style: Theme.of(context).textTheme.headlineSmall?.copyWith( + fontWeight: FontWeight.bold, + color: Theme.of(context).colorScheme.onSurface, + ), + textAlign: TextAlign.center, + ), + const SizedBox(height: 12), + + // Subtitle + Text( + subtitle ?? 
'Unable to connect to the PinePods server', + style: Theme.of(context).textTheme.bodyLarge?.copyWith( + color: Theme.of(context).colorScheme.onSurface.withOpacity(0.7), + ), + textAlign: TextAlign.center, + ), + const SizedBox(height: 20), + + // Error Message (if provided) + if (errorMessage != null && errorMessage!.isNotEmpty) ...[ + Container( + width: double.infinity, + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.errorContainer.withOpacity(0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all( + color: Theme.of(context).colorScheme.error.withOpacity(0.2), + ), + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Icon( + Icons.info_outline, + size: 16, + color: Theme.of(context).colorScheme.error, + ), + const SizedBox(width: 6), + Text( + 'Error Details', + style: Theme.of(context).textTheme.labelMedium?.copyWith( + fontWeight: FontWeight.w600, + color: Theme.of(context).colorScheme.error, + ), + ), + ], + ), + const SizedBox(height: 8), + Text( + errorMessage!, + style: Theme.of(context).textTheme.bodySmall?.copyWith( + color: Theme.of(context).colorScheme.onErrorContainer, + ), + ), + ], + ), + ), + const SizedBox(height: 24), + ], + + // Troubleshooting suggestions + Container( + width: double.infinity, + padding: const EdgeInsets.all(16), + decoration: BoxDecoration( + color: Theme.of(context).colorScheme.primaryContainer.withOpacity(0.1), + borderRadius: BorderRadius.circular(12), + border: Border.all( + color: Theme.of(context).colorScheme.primary.withOpacity(0.2), + ), + ), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Row( + children: [ + Icon( + Icons.lightbulb_outline, + size: 16, + color: Theme.of(context).colorScheme.primary, + ), + const SizedBox(width: 6), + Text( + 'Troubleshooting Tips', + style: Theme.of(context).textTheme.labelMedium?.copyWith( + fontWeight: FontWeight.w600, + 
color: Theme.of(context).colorScheme.primary, + ), + ), + ], + ), + const SizedBox(height: 8), + _buildTroubleshootingTip(context, '• Check your internet connection'), + _buildTroubleshootingTip(context, '• Verify server settings in the app'), + _buildTroubleshootingTip(context, '• Ensure the PinePods server is running'), + _buildTroubleshootingTip(context, '• Contact your administrator if the issue persists'), + ], + ), + ), + + const SizedBox(height: 32), + + // Action Buttons + if (onRetry != null) + SizedBox( + width: double.infinity, + child: FilledButton.icon( + onPressed: onRetry, + icon: const Icon(Icons.refresh), + label: const Text('Retry'), + style: FilledButton.styleFrom( + padding: const EdgeInsets.symmetric(vertical: 12), + ), + ), + ), + ], + ), + ); + } + + Widget _buildTroubleshootingTip(BuildContext context, String tip) { + return Padding( + padding: const EdgeInsets.only(bottom: 4), + child: Text( + tip, + style: Theme.of(context).textTheme.bodySmall?.copyWith( + color: Theme.of(context).colorScheme.onPrimaryContainer, + ), + ), + ); + } +} + +/// A specialized server error page for SliverFillRemaining usage +class SliverServerErrorPage extends StatelessWidget { + final String? errorMessage; + final VoidCallback? onRetry; + final String? title; + final String? subtitle; + final bool showLogo; + + const SliverServerErrorPage({ + Key? 
key, + this.errorMessage, + this.onRetry, + this.title, + this.subtitle, + this.showLogo = true, + }) : super(key: key); + + @override + Widget build(BuildContext context) { + return SliverFillRemaining( + hasScrollBody: false, + child: ServerErrorPage( + errorMessage: errorMessage, + onRetry: onRetry, + title: title, + subtitle: subtitle, + showLogo: showLogo, + ), + ); + } +} \ No newline at end of file diff --git a/mobile/metadata/en-US/full_description.txt b/mobile/metadata/en-US/full_description.txt index 2745db12..a0690f21 100644 --- a/mobile/metadata/en-US/full_description.txt +++ b/mobile/metadata/en-US/full_description.txt @@ -3,6 +3,7 @@ PinePods is a complete podcast management solution that allows you to host your This is the official Pinepods companion app for Android. Features of Pinepods: + • Self-hosted podcast server synchronization that syncs everything between your devices • Beautiful, intuitive mobile interface with loads of themes that sync between devices and platforms • Download episodes for offline listening or archiving on your server diff --git a/mobile/pubspec.lock b/mobile/pubspec.lock index 5a8208c2..5b605083 100644 --- a/mobile/pubspec.lock +++ b/mobile/pubspec.lock @@ -558,7 +558,7 @@ packages: source: hosted version: "0.15.6" http: - dependency: transitive + dependency: "direct main" description: name: http sha256: "2c11f3f94c687ee9bad77c171151672986360b2b001d109814ee7140b2cf261b" @@ -781,6 +781,22 @@ packages: url: "https://pub.dev" source: hosted version: "2.2.0" + package_info_plus: + dependency: "direct main" + description: + name: package_info_plus + sha256: "7976bfe4c583170d6cdc7077e3237560b364149fcd268b5f53d95a991963b191" + url: "https://pub.dev" + source: hosted + version: "8.3.0" + package_info_plus_platform_interface: + dependency: transitive + description: + name: package_info_plus_platform_interface + sha256: "6c935fb612dff8e3cc9632c2b301720c77450a126114126ffaafe28d2e87956c" + url: "https://pub.dev" + source: hosted + 
version: "3.2.0" path: dependency: "direct main" description: @@ -1394,6 +1410,38 @@ packages: url: "https://pub.dev" source: hosted version: "3.0.3" + webview_flutter: + dependency: "direct main" + description: + name: webview_flutter + sha256: c3e4fe614b1c814950ad07186007eff2f2e5dd2935eba7b9a9a1af8e5885f1ba + url: "https://pub.dev" + source: hosted + version: "4.13.0" + webview_flutter_android: + dependency: transitive + description: + name: webview_flutter_android + sha256: "0a42444056b24ed832bdf3442d65c5194f6416f7e782152384944053c2ecc9a3" + url: "https://pub.dev" + source: hosted + version: "4.10.0" + webview_flutter_platform_interface: + dependency: transitive + description: + name: webview_flutter_platform_interface + sha256: "63d26ee3aca7256a83ccb576a50272edd7cfc80573a4305caa98985feb493ee0" + url: "https://pub.dev" + source: hosted + version: "2.14.0" + webview_flutter_wkwebview: + dependency: transitive + description: + name: webview_flutter_wkwebview + sha256: fb46db8216131a3e55bcf44040ca808423539bc6732e7ed34fb6d8044e3d512f + url: "https://pub.dev" + source: hosted + version: "3.23.0" win32: dependency: transitive description: diff --git a/mobile/pubspec.yaml b/mobile/pubspec.yaml index f0ce5b76..f8c2d14c 100644 --- a/mobile/pubspec.yaml +++ b/mobile/pubspec.yaml @@ -1,7 +1,7 @@ name: pinepods_mobile description: Pinepods Podcast Server -version: 0.7.10+20250714 +version: 0.8.1+20252203 environment: sdk: ">=3.8.0 <4.0.0" @@ -28,12 +28,14 @@ dependencies: flutter_launcher_icons: ^0.14.2 flutter_spinkit: ^5.0.0 html: ^0.15.0 + http: ^1.2.2 intl: ^0.20.2 intl_translation: ^0.20.0 just_audio: ^0.10.4 logging: ^1.0.2 meta: ^1.17.0 mp3_info: ^0.2.0 + package_info_plus: ^8.1.2 path: ^1.8.3 path_provider: ^2.1.4 path_provider_platform_interface: ^2.0.4 @@ -48,6 +50,7 @@ dependencies: shared_preferences: ^2.3.4 sliver_tools: ^0.2.12 url_launcher: ^6.3.1 + webview_flutter: ^4.9.0 xml: ^6.5.0 flutter: @@ -101,15 +104,15 @@ flutter: - assets/images/favicon.png - 
assets/images/icon-192.png - assets/images/pinepods-logo.png - - assets/images/1.jpg - - assets/images/2.jpg - - assets/images/3.jpg - - assets/images/4.jpg - - assets/images/5.jpg - - assets/images/6.jpg - - assets/images/7.jpg - - assets/images/8.jpg - - assets/images/9.jpg + - assets/images/1.webp + - assets/images/2.webp + - assets/images/3.webp + - assets/images/4.webp + - assets/images/5.webp + - assets/images/6.webp + - assets/images/7.webp + - assets/images/8.webp + - assets/images/9.webp # Certificate authorities - assets/ca/lets-encrypt-r3.pem diff --git a/requirements.txt b/requirements.txt index 0d9050d6..eb47f401 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -mysql-connector-python +mariadb python-dateutil python-dateutil passlib diff --git a/rust-api/Cargo.lock b/rust-api/Cargo.lock index 870378d5..e61fd1e9 100644 --- a/rust-api/Cargo.lock +++ b/rust-api/Cargo.lock @@ -2,15 +2,6 @@ # It is not intended for manual editing. version = 4 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" version = "2.0.1" @@ -53,12 +44,6 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -70,9 +55,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = 
"a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "arbitrary" @@ -93,9 +78,9 @@ dependencies = [ [[package]] name = "argon2" -version = "0.6.0-rc.0" +version = "0.6.0-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99d62242539f39ae10c6654d86121e152344e24329c4adb921d672b6369f47a2" +checksum = "d911686206fdd816a61ed5226535997149b0fc7726e37fee46f407c9ff82ed87" dependencies = [ "base64ct", "blake2", @@ -152,9 +137,9 @@ dependencies = [ [[package]] name = "async-trait" -version = "0.1.88" +version = "0.1.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e539d3fca749fcee5236ab05e93a52867dd549cc157c8cb7f99595f3cedffdb5" +checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", @@ -205,11 +190,35 @@ dependencies = [ "arrayvec", ] +[[package]] +name = "aws-lc-rs" +version = "1.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fcc8f365936c834db5514fc45aee5b1202d677e6b40e48468aaaa8183ca8c7" +dependencies = [ + "aws-lc-sys", + "untrusted 0.7.1", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61b1d86e7705efe1be1b569bab41d4fa1e14e220b60a160f78de2db687add079" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", +] + [[package]] name = "axum" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021e862c184ae977658b36c4500f7feac3221ca5da43e3f25bd04ab6c79a29b5" +checksum = "8a18ed336352031311f4e0b4dd2ff392d4fbb370777c9d18d7fc9d7359f73871" dependencies = [ "axum-core", "axum-macros", @@ -229,8 +238,7 @@ dependencies = [ "multer", "percent-encoding", "pin-project-lite", - "rustversion", - "serde", + "serde_core", "serde_json", "serde_path_to_error", "serde_urlencoded", @@ -246,9 +254,9 @@ 
dependencies = [ [[package]] name = "axum-core" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68464cd0412f486726fb3373129ef5d2993f90c34bc2bc1c1e9943b2f4fc7ca6" +checksum = "59446ce19cd142f8833f856eb31f3eb097812d1479ab224f54d72428ca21ea22" dependencies = [ "bytes", "futures-core", @@ -257,7 +265,6 @@ dependencies = [ "http-body-util", "mime", "pin-project-lite", - "rustversion", "sync_wrapper", "tower-layer", "tower-service", @@ -275,21 +282,6 @@ dependencies = [ "syn", ] -[[package]] -name = "backtrace" -version = "0.3.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] - [[package]] name = "base32" version = "0.5.1" @@ -316,9 +308,9 @@ checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bigdecimal" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a22f228ab7a1b23027ccc6c350b72868017af7ea8356fbdf19f8d991c690013" +checksum = "560f42649de9fa436b73517378a147ec21f6c997a546581df4b4b31677828934" dependencies = [ "autocfg", "libm", @@ -328,16 +320,33 @@ dependencies = [ ] [[package]] -name = "bit_field" -version = "0.10.2" +name = "bindgen" +version = "0.69.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags", + "cexpr", + "clang-sys", + "itertools", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn", + "which", +] [[package]] -name = "bitflags" -version = "1.3.2" +name = 
"bit_field" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" +checksum = "dc827186963e592360843fb5ba4b973e145841266c1357f7180c43526f2e5b61" [[package]] name = "bitflags" @@ -356,11 +365,11 @@ checksum = "6099cdc01846bc367c4e7dd630dc5966dccf36b652fae7a74e17b640411a91b2" [[package]] name = "blake2" -version = "0.11.0-rc.0" +version = "0.11.0-rc.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce3d950855224a23299348898f8a2127860e1afea78df3e51deebb89d1cb2f8f" +checksum = "1edac47499deef695d9431bf241c75ea29f4cf3dcb78d39e19b31515e4ad3b08" dependencies = [ - "digest 0.11.0-rc.0", + "digest 0.11.0-rc.3", ] [[package]] @@ -374,9 +383,9 @@ dependencies = [ [[package]] name = "block-buffer" -version = "0.11.0-rc.4" +version = "0.11.0-rc.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a229bfd78e4827c91b9b95784f69492c1b77c1ab75a45a8a037b139215086f94" +checksum = "e9ef36a6fcdb072aa548f3da057640ec10859eb4e91ddf526ee648d50c76a949" dependencies = [ "hybrid-array", ] @@ -428,6 +437,15 @@ dependencies = [ "shlex", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom 7.1.3", +] + [[package]] name = "cfg-expr" version = "0.15.8" @@ -452,17 +470,26 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", "wasm-bindgen", - "windows-link", + 
"windows-link 0.2.1", +] + +[[package]] +name = "chrono-tz" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" +dependencies = [ + "chrono", + "phf", ] [[package]] @@ -475,6 +502,26 @@ dependencies = [ "stacker", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + +[[package]] +name = "cmake" +version = "0.1.54" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7caa3f9de89ddbe2c607f4101924c5abec803763ae9534e4f4d7d8f84aa81f0" +dependencies = [ + "cc", +] + [[package]] name = "color_quant" version = "1.1.0" @@ -506,9 +553,9 @@ dependencies = [ [[package]] name = "config" -version = "0.15.13" +version = "0.15.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b1eb4fb07bc7f012422df02766c7bd5971effb894f573865642f06fa3265440" +checksum = "180e549344080374f9b32ed41bf3b6b57885ff6a289367b3dbc10eea8acc1918" dependencies = [ "async-trait", "convert_case", @@ -516,9 +563,10 @@ dependencies = [ "pathdiff", "ron", "rust-ini", - "serde", + "serde-untagged", + "serde_core", "serde_json", - "toml 0.9.2", + "toml 0.9.8", "winnow", "yaml-rust2", ] @@ -564,6 +612,35 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "cookie" +version = "0.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" +dependencies = [ + "percent-encoding", + "time", + "version_check", +] + +[[package]] +name = "cookie_store" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2eac901828f88a5241ee0600950ab981148a18f2f756900ffba1b125ca6a3ef9" +dependencies = [ + "cookie", + 
"document-features", + "idna", + "log", + "publicsuffix", + "serde", + "serde_derive", + "serde_json", + "time", + "url", +] + [[package]] name = "core-foundation" version = "0.9.4" @@ -615,11 +692,13 @@ dependencies = [ [[package]] name = "croner" -version = "2.2.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c344b0690c1ad1c7176fe18eb173e0c927008fdaaa256e40dfd43ddd149c0843" +checksum = "4aa42bcd3d846ebf66e15bd528d1087f75d1c6c1c66ebff626178a106353c576" dependencies = [ "chrono", + "derive_builder", + "strum", ] [[package]] @@ -674,13 +753,48 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.2.0-rc.3" +version = "0.2.0-rc.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a23fa214dea9efd4dacee5a5614646b30216ae0f05d4bb51bafb50e9da1c5be" +checksum = "6a8235645834fbc6832939736ce2f2d08192652269e11010a6240f61b908a1c6" dependencies = [ "hybrid-array", ] +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core", + "quote", + "syn", +] + [[package]] name = "data-encoding" version = "2.9.0" @@ -707,6 +821,37 @@ dependencies = [ "powerfmt", ] +[[package]] +name = "derive_builder" +version = "0.20.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" +dependencies = [ + "derive_builder_macro", +] + +[[package]] +name = "derive_builder_core" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" +dependencies = [ + "darling", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "derive_builder_macro" +version = "0.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" +dependencies = [ + "derive_builder_core", + "syn", +] + [[package]] name = "digest" version = "0.10.7" @@ -721,12 +866,12 @@ dependencies = [ [[package]] name = "digest" -version = "0.11.0-rc.0" +version = "0.11.0-rc.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "460dd7f37e4950526b54a5a6b1f41b6c8e763c58eb9a8fc8fc05ba5c2f44ca7b" +checksum = "dac89f8a64533a9b0eaa73a68e424db0fb1fd6271c74cc0125336a05f090568d" dependencies = [ - "block-buffer 0.11.0-rc.4", - "crypto-common 0.2.0-rc.3", + "block-buffer 0.11.0-rc.5", + "crypto-common 0.2.0-rc.4", "subtle", ] @@ -750,12 +895,27 @@ dependencies = [ "const-random", ] +[[package]] +name = "document-features" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d" +dependencies = [ + "litrs", +] + [[package]] name = "dotenvy" version = "0.15.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" +[[package]] +name = "dunce" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" + [[package]] name = "either" version = "1.15.0" @@ 
-816,6 +976,16 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +[[package]] +name = "erased-serde" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" +dependencies = [ + "serde", + "typeid", +] + [[package]] name = "errno" version = "0.3.13" @@ -869,6 +1039,26 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "fax" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f05de7d48f37cd6730705cbca900770cab77a89f413d23e100ad7fad7795a0ab" +dependencies = [ + "fax_derive", +] + +[[package]] +name = "fax_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0aca10fb742cb43f9e7bb8467c91aa9bcb8e3ffbc6a6f7389bb93ffc920577d" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "fdeflate" version = "0.3.7" @@ -958,13 +1148,19 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "futures" version = "0.3.31" @@ -1113,10 +1309,10 @@ dependencies = [ ] [[package]] -name = "gimli" -version = "0.31.1" +name = "glob" +version = "0.3.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "h2" @@ -1270,22 +1466,23 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hybrid-array" -version = "0.3.1" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891d15931895091dea5c47afa5b3c9a01ba634b311919fd4d41388fa0e3d76af" +checksum = "f471e0a81b2f90ffc0cb2f951ae04da57de8baa46fa99112b062a5173a5088d0" dependencies = [ "typenum", ] [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "h2", "http", "http-body", @@ -1293,6 +1490,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -1473,16 +1671,22 @@ version = "1.16.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aadb14a5ba1a0d58ecd4a29bfc9b8f1d119eee24aa01a62c1ec93eb9630a1d86" dependencies = [ - "bitflags 2.9.1", + "bitflags", "byteorder", "flate2", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", 
"smallvec", @@ -1501,9 +1705,9 @@ dependencies = [ [[package]] name = "image" -version = "0.25.6" +version = "0.25.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db35664ce6b9810857a38a906215e75a9c879f0696556a39f59c62829710251a" +checksum = "529feb3e6769d234375c4cf1ee2ce713682b8e76538cb13f9fc23e1400a591e7" dependencies = [ "bytemuck", "byteorder-lite", @@ -1511,6 +1715,7 @@ dependencies = [ "exr", "gif", "image-webp", + "moxcms", "num-traits", "png", "qoi", @@ -1559,17 +1764,6 @@ dependencies = [ "syn", ] -[[package]] -name = "io-uring" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" -dependencies = [ - "bitflags 2.9.1", - "cfg-if", - "libc", -] - [[package]] name = "ipnet" version = "2.11.0" @@ -1611,12 +1805,6 @@ dependencies = [ "libc", ] -[[package]] -name = "jpeg-decoder" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00810f1d8b74be64b13dbf3db89ac67740615d6c891f0e7b6179326533011a07" - [[package]] name = "js-sys" version = "0.3.77" @@ -1640,16 +1828,18 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "9.3.1" +version = "10.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a87cc7a48537badeae96744432de36f4be2b4a34a05a5ef32e9dd8a1c169dde" +checksum = "3d119c6924272d16f0ab9ce41f7aa0bfef9340c00b0bb7ca3dd3b263d4a9150b" dependencies = [ + "aws-lc-rs", "base64 0.22.1", + "getrandom 0.2.16", "js-sys", "pem", - "ring", "serde", "serde_json", + "signature", "simple_asn1", ] @@ -1662,6 +1852,12 @@ dependencies = [ "spin", ] +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "lebe" version = "0.5.2" @@ -1670,9 +1866,9 @@ checksum = 
"03087c2bad5e1034e8cace5926dec053fb3790248370865f5117a7d0213354c8" [[package]] name = "lettre" -version = "0.11.17" +version = "0.11.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb2a0354e9ece2fcdcf9fa53417f6de587230c0c248068eb058fa26c4a753179" +checksum = "5cb54db6ff7a89efac87dba5baeac57bb9ccd726b49a9b6f21fb92b3966aaf56" dependencies = [ "async-trait", "base64 0.22.1", @@ -1689,7 +1885,7 @@ dependencies = [ "percent-encoding", "quoted_printable", "rustls", - "socket2 0.5.10", + "socket2 0.6.0", "tokio", "tokio-rustls", "url", @@ -1712,6 +1908,16 @@ dependencies = [ "cc", ] +[[package]] +name = "libloading" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07033963ba89ebaf1584d767badaa2e8fcec21aedea6b8c0346d487d49c28667" +dependencies = [ + "cfg-if", + "windows-targets 0.48.5", +] + [[package]] name = "libm" version = "0.2.15" @@ -1728,6 +1934,12 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linux-raw-sys" +version = "0.4.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" + [[package]] name = "linux-raw-sys" version = "0.9.4" @@ -1740,6 +1952,12 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" +[[package]] +name = "litrs" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f5e54036fe321fd421e10d732f155734c4e4afd610dd556d9a82833ab3ee0bed" + [[package]] name = "lock_api" version = "0.4.13" @@ -1773,11 +1991,11 @@ checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" [[package]] name = "matchers" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +checksum = 
"d1525a2a28c7f4fa0fc98bb91ae755d1e2d1505079e05539e35bc876b5d65ae9" dependencies = [ - "regex-automata 0.1.10", + "regex-automata", ] [[package]] @@ -1864,6 +2082,16 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "moxcms" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddd32fa8935aeadb8a8a6b6b351e40225570a37c43de67690383d87ef170cd08" +dependencies = [ + "num-traits", + "pxfm", +] + [[package]] name = "mp3-metadata" version = "0.4.0" @@ -1937,12 +2165,11 @@ checksum = "0676bb32a98c1a483ce53e500a81ad9c3d5b3f7c920c28c24e9cb0980d0b5bc8" [[package]] name = "nu-ansi-term" -version = "0.46.0" +version = "0.50.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +checksum = "d4a28e057d01f97e61255210fcff094d74ed0466038633e95017f5beb68e4399" dependencies = [ - "overload", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -2030,15 +2257,6 @@ dependencies = [ "libm", ] -[[package]] -name = "object" -version = "0.36.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87" -dependencies = [ - "memchr", -] - [[package]] name = "once_cell" version = "1.21.3" @@ -2051,7 +2269,7 @@ version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.1", + "bitflags", "cfg-if", "foreign-types", "libc", @@ -2099,12 +2317,6 @@ dependencies = [ "hashbrown 0.14.5", ] -[[package]] -name = "overload" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" - [[package]] name = "parking" version = "2.2.1" @@ -2178,9 +2390,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = 
"2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" @@ -2189,7 +2401,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" dependencies = [ "memchr", - "thiserror 2.0.12", + "thiserror 2.0.17", "ucd-trie", ] @@ -2226,6 +2438,24 @@ dependencies = [ "sha2", ] +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + [[package]] name = "pin-project" version = "1.1.10" @@ -2269,6 +2499,7 @@ dependencies = [ "base64 0.22.1", "bigdecimal", "chrono", + "chrono-tz", "config", "dotenvy", "feed-rs", @@ -2283,7 +2514,7 @@ dependencies = [ "mime_guess", "mp3-metadata", "qrcode", - "quick-xml 0.38.0", + "quick-xml 0.38.3", "rand 0.9.2", "redis", "regex", @@ -2291,7 +2522,7 @@ dependencies = [ "serde", "serde_json", "sqlx", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-cron-scheduler", "tokio-stream", @@ -2336,11 +2567,11 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "png" -version = "0.17.16" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" +checksum = "97baced388464909d42d89643fe4361939af9b7ce7a31ee32a168f832a70f2a0" dependencies = [ - "bitflags 1.3.2", + "bitflags", "crc32fast", "fdeflate", "flate2", 
@@ -2371,6 +2602,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.95" @@ -2399,6 +2640,12 @@ dependencies = [ "syn", ] +[[package]] +name = "psl-types" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" + [[package]] name = "psm" version = "0.1.26" @@ -2408,6 +2655,25 @@ dependencies = [ "cc", ] +[[package]] +name = "publicsuffix" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f42ea446cab60335f76979ec15e12619a2165b5ae2c12166bef27d283a9fadf" +dependencies = [ + "idna", + "psl-types", +] + +[[package]] +name = "pxfm" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f55f4fedc84ed39cb7a489322318976425e42a147e2be79d8f878e2884f94e84" +dependencies = [ + "num-traits", +] + [[package]] name = "qoi" version = "0.4.1" @@ -2444,9 +2710,9 @@ dependencies = [ [[package]] name = "quick-xml" -version = "0.38.0" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8927b0664f5c5a98265138b7e3f90aa19a6b21353182469ace36d4ac527b7b1b" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ "memchr", ] @@ -2462,10 +2728,10 @@ dependencies = [ "pin-project-lite", "quinn-proto", "quinn-udp", - "rustc-hash", + "rustc-hash 2.1.1", "rustls", "socket2 0.5.10", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tracing", "web-time", @@ -2482,11 +2748,11 @@ dependencies = [ "lru-slab", "rand 0.9.2", "ring", - "rustc-hash", + "rustc-hash 2.1.1", "rustls", "rustls-pki-types", "slab", - "thiserror 2.0.12", + "thiserror 
2.0.17", "tinyvec", "tracing", "web-time", @@ -2658,9 +2924,9 @@ dependencies = [ [[package]] name = "redis" -version = "0.32.4" +version = "0.32.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f66bf4cac9733a23bcdf1e0e01effbaaad208567beba68be8f67e5f4af3ee1" +checksum = "014cc767fefab6a3e798ca45112bccad9c6e0e218fbd49720042716c73cfef44" dependencies = [ "bytes", "cfg-if", @@ -2684,47 +2950,32 @@ version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0d04b7d0ee6b4a0207a0a7adb104d23ecb0b47d6beae7152d0fa34b692b29fd6" dependencies = [ - "bitflags 2.9.1", + "bitflags", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.9", - "regex-syntax 0.8.5", + "regex-automata", + "regex-syntax", ] [[package]] name = "regex-automata" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" -dependencies = [ - "regex-syntax 0.6.29", -] - -[[package]] -name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.5", + "regex-syntax", ] -[[package]] -name = "regex-syntax" -version = "0.6.29" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" - [[package]] name = "regex-syntax" version = "0.8.5" @@ -2733,12 +2984,14 @@ checksum = 
"2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.22" +version = "0.12.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531" +checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64 0.22.1", "bytes", + "cookie", + "cookie_store", "encoding_rs", "futures-core", "futures-util", @@ -2794,7 +3047,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.16", "libc", - "untrusted", + "untrusted 0.9.0", "windows-sys 0.52.0", ] @@ -2805,7 +3058,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b91f7eff05f748767f183df4320a63d6936e9c6107d97c9e6bdd9784f4289c94" dependencies = [ "base64 0.21.7", - "bitflags 2.9.1", + "bitflags", "serde", "serde_derive", ] @@ -2832,19 +3085,19 @@ dependencies = [ [[package]] name = "rust-ini" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7295b7ce3bf4806b419dc3420745998b447178b7005e2011947b38fc5aa6791" +checksum = "796e8d2b6696392a43bea58116b667fb4c29727dc5abd27d6acf338bb4f688c7" dependencies = [ "cfg-if", "ordered-multimap", ] [[package]] -name = "rustc-demangle" -version = "0.1.25" +name = "rustc-hash" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc-hash" @@ -2852,16 +3105,29 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustix" +version = "0.38.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" 
+dependencies = [ + "bitflags", + "errno", + "libc", + "linux-raw-sys 0.4.15", + "windows-sys 0.52.0", +] + [[package]] name = "rustix" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c71e83d6afe7ff64890ec6b71d6a69bb8a610ab78ce364b3352876bb4c801266" dependencies = [ - "bitflags 2.9.1", + "bitflags", "errno", "libc", - "linux-raw-sys", + "linux-raw-sys 0.9.4", "windows-sys 0.59.0", ] @@ -2898,7 +3164,7 @@ checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" dependencies = [ "ring", "rustls-pki-types", - "untrusted", + "untrusted 0.9.0", ] [[package]] @@ -2934,7 +3200,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.1", + "bitflags", "core-foundation", "core-foundation-sys", "libc", @@ -2953,18 +3219,40 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde-untagged" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9faf48a4a2d2693be24c6289dbe26552776eb7737074e6722891fadbe6c5058" +dependencies = [ + "erased-serde", + "serde", + "serde_core", + "typeid", +] + +[[package]] +name = "serde_core" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", @@ -2973,14 +3261,15 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.141" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30b9eff21ebe718216c6ec64e1d9ac57087aad11efc64e32002bce4a0d4c03d3" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -3004,11 +3293,11 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" +checksum = "e24345aa0fe688594e73770a5f6d1b216508b4f93484c0026d521acd30134392" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -3108,7 +3397,7 @@ checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", "num-traits", - "thiserror 2.0.12", + "thiserror 2.0.17", "time", ] @@ -3215,7 +3504,7 @@ dependencies = [ "serde_json", "sha2", "smallvec", - "thiserror 2.0.12", + "thiserror 2.0.17", "tokio", "tokio-stream", "tracing", @@ -3271,7 +3560,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.9.1", + "bitflags", "byteorder", "bytes", "chrono", @@ -3301,7 +3590,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "uuid", "whoami", @@ -3316,7 +3605,7 @@ dependencies = [ "atoi", "base64 0.22.1", "bigdecimal", - "bitflags 2.9.1", + "bitflags", "byteorder", "chrono", "crc", @@ -3342,7 +3631,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "uuid", "whoami", @@ -3368,7 +3657,7 @@ dependencies = [ "serde", 
"serde_urlencoded", "sqlx-core", - "thiserror 2.0.12", + "thiserror 2.0.17", "tracing", "url", "uuid", @@ -3404,6 +3693,33 @@ dependencies = [ "unicode-properties", ] +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "strum" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" +dependencies = [ + "strum_macros", +] + +[[package]] +name = "strum_macros" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "subtle" version = "2.6.1" @@ -3447,7 +3763,7 @@ version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" dependencies = [ - "bitflags 2.9.1", + "bitflags", "core-foundation", "system-configuration-sys", ] @@ -3490,7 +3806,7 @@ dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", - "rustix", + "rustix 1.0.7", "windows-sys 0.59.0", ] @@ -3505,11 +3821,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl 2.0.12", + "thiserror-impl 2.0.17", ] [[package]] @@ -3525,9 +3841,9 @@ dependencies = [ [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" dependencies = [ "proc-macro2", "quote", @@ -3545,13 +3861,16 @@ dependencies = [ [[package]] name = "tiff" -version = "0.9.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba1310fcea54c6a9a4fd1aad794ecc02c31682f6bfbecdf460bf19533eed1e3e" +checksum = "af9605de7fee8d9551863fd692cce7637f548dbd9db9180fcc07ccc6d26c336f" dependencies = [ + "fax", "flate2", - "jpeg-decoder", + "half", + "quick-error", "weezl", + "zune-jpeg", ] [[package]] @@ -3621,31 +3940,29 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.46.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "slab", - "socket2 0.5.10", + "socket2 0.6.0", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-cron-scheduler" -version = "0.14.0" +version = "0.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c71ce8f810abc9fabebccc30302a952f9e89c6cf246fafaf170fef164063141" +checksum = "1f50e41f200fd8ed426489bd356910ede4f053e30cebfbd59ef0f856f0d7432a" dependencies = [ "chrono", + "chrono-tz", "croner", "num-derive", "num-traits", @@ -3656,9 +3973,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" 
dependencies = [ "proc-macro2", "quote", @@ -3711,9 +4028,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.26.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a9daff607c6d2bf6c16fd681ccb7eecc83e4e2cdc1ca067ffaadfca5de7f084" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" dependencies = [ "futures-util", "log", @@ -3723,9 +4040,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.15" +version = "0.7.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66a539a9ad6d5d281510d5bd368c973d636c02dbf8a67300bfb6b950696ad7df" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" dependencies = [ "bytes", "futures-core", @@ -3748,13 +4065,13 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.2" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac" +checksum = "f0dc8b1fb61449e27716ec0e1bdf0f6b8f3e8f6b05391e8497b8b6d7804ea6d8" dependencies = [ - "serde", - "serde_spanned 1.0.0", - "toml_datetime 0.7.0", + "serde_core", + "serde_spanned 1.0.3", + "toml_datetime 0.7.3", "toml_parser", "winnow", ] @@ -3770,11 +4087,11 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.0" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +checksum = "f2cdb639ebbc97961c51720f858597f7f24c4fc295327923af55b74c3c724533" dependencies = [ - "serde", + "serde_core", ] [[package]] @@ -3792,9 +4109,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" +checksum = 
"c0cbe268d35bdb4bb5a56a2de88d0ad0eb70af5384a99d648cd4b3d04039800e" dependencies = [ "winnow", ] @@ -3838,7 +4155,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ "async-compression", - "bitflags 2.9.1", + "bitflags", "bytes", "futures-core", "futures-util", @@ -3932,14 +4249,14 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.19" +version = "0.3.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" +checksum = "2054a14f5307d601f88daf0553e1cbf472acc4f2c51afab632431cdcd72124d5" dependencies = [ "matchers", "nu-ansi-term", "once_cell", - "regex", + "regex-automata", "sharded-slab", "smallvec", "thread_local", @@ -3956,9 +4273,9 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" -version = "0.26.2" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4793cb5e56680ecbb1d843515b23b6de9a75eb04b66643e256a396d43be33c13" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes", "data-encoding", @@ -3967,10 +4284,16 @@ dependencies = [ "log", "rand 0.9.2", "sha1", - "thiserror 2.0.12", + "thiserror 2.0.17", "utf-8", ] +[[package]] +name = "typeid" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" + [[package]] name = "typenum" version = "1.18.0" @@ -4022,6 +4345,12 @@ version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" +[[package]] +name = "untrusted" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" + [[package]] name = "untrusted" version = "0.9.0" @@ -4030,9 +4359,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -4060,9 +4389,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" -version = "1.17.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cf4199d1e5d15ddd86a694e4d0dffa9c323ce759fea589f00fef9d81cc1931d" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ "getrandom 0.3.3", "js-sys", @@ -4264,37 +4593,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a751b3277700db47d3e574514de2eced5e54dc8a5436a3bf7a0b248b2cee16f3" [[package]] -name = "whoami" -version = "1.6.0" +name = "which" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ - "redox_syscall", - "wasite", + "either", + "home", + "once_cell", + "rustix 0.38.44", ] [[package]] -name = "winapi" -version = "0.3.9" +name = "whoami" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +checksum = "6994d13118ab492c3c80c1f81928718159254c53c472bf9ce36f8dae4add02a7" dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", + "redox_syscall", + "wasite", ] -[[package]] 
-name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - [[package]] name = "windows-core" version = "0.61.2" @@ -4303,7 +4622,7 @@ checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", - "windows-link", + "windows-link 0.1.3", "windows-result", "windows-strings", ] @@ -4336,13 +4655,19 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + [[package]] name = "windows-registry" version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ - "windows-link", + "windows-link 0.1.3", "windows-result", "windows-strings", ] @@ -4353,7 +4678,7 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -4362,7 +4687,7 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] @@ -4401,6 +4726,15 @@ dependencies = [ "windows-targets 0.53.2", ] +[[package]] +name = 
"windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link 0.2.1", +] + [[package]] name = "windows-targets" version = "0.48.5" @@ -4588,9 +4922,9 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] @@ -4601,7 +4935,7 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.9.1", + "bitflags", ] [[package]] @@ -4612,9 +4946,9 @@ checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yaml-rust2" -version = "0.10.3" +version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ce2a4ff45552406d02501cea6c18d8a7e50228e7736a872951fe2fe75c91be7" +checksum = "2462ea039c445496d8793d052e13787f2b90e750b833afee748e601c17621ed9" dependencies = [ "arraydeque", "encoding_rs", diff --git a/rust-api/Cargo.toml b/rust-api/Cargo.toml index c6198ec1..69c13e5e 100644 --- a/rust-api/Cargo.toml +++ b/rust-api/Cargo.toml @@ -2,84 +2,86 @@ name = "pinepods-api" version = "0.1.0" edition = "2021" +rust-version = "1.89" [dependencies] # Web Framework -axum = { version = "0.8.4", features = ["macros", "multipart", "ws"] } -tokio = { version = "1.46.1", features = ["full"] } +axum = { version = "0.8.6", features = ["macros", "multipart", "ws"] } +tokio = { version = "1.48.0", features = ["full"] } tower = { version = "0.5.2", features = ["util", "timeout", "load-shed", "limit"] } 
tower-http = { version = "0.6.6", features = ["fs", "trace", "cors", "compression-gzip"] } # Serialization -serde = { version = "1.0.219", features = ["derive"] } -serde_json = "1.0.141" +serde = { version = "1.0.228", features = ["derive"] } +serde_json = "1.0.145" # Database sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "postgres", "mysql", "uuid", "chrono", "json", "bigdecimal"] } -bigdecimal = "0.4.8" +bigdecimal = "0.4.9" # Redis/Valkey -redis = { version = "0.32.4", features = ["aio", "tokio-comp"] } +redis = { version = "0.32.7", features = ["aio", "tokio-comp"] } # HTTP Client -reqwest = { version = "0.12.22", features = ["json", "rustls-tls", "stream"] } +reqwest = { version = "0.12.24", features = ["json", "rustls-tls", "stream", "cookies"] } # Configuration and Environment -config = "0.15.13" +config = "0.15.18" dotenvy = "0.15.7" # Logging tracing = "0.1.41" -tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } +tracing-subscriber = { version = "0.3.20", features = ["env-filter"] } # Utilities -uuid = { version = "1.17.0", features = ["v4", "serde"] } -chrono = { version = "0.4.41", features = ["serde"] } -anyhow = "1.0.98" -thiserror = "2.0.12" -async-trait = "0.1.88" +uuid = { version = "1.18.1", features = ["v4", "serde"] } +chrono = { version = "0.4.42", features = ["serde"] } +chrono-tz = "0.10.0" +anyhow = "1.0.100" +thiserror = "2.0.17" +async-trait = "0.1.89" base64 = "0.22.1" lazy_static = "1.5.0" urlencoding = "2.1.3" # Authentication and Crypto -argon2 = "0.6.0-rc.0" -jsonwebtoken = "9.3.1" +argon2 = "0.6.0-rc.1" +jsonwebtoken = { version = "10.1.0", features = ["aws_lc_rs"] } rand = "0.9.2" # MFA/TOTP Support totp-rs = { version = "5.7.0", features = ["otpauth"] } qrcode = "0.14.1" -image = "0.25.6" +image = "0.25.8" # Encryption for sync credentials fernet = "0.2.2" # RSS/Feed Processing feed-rs = "2.3.1" -url = "2.5.4" -regex = "1.11.1" +url = "2.5.7" +regex = "1.12.2" # Audio metadata tagging id3 = 
"1.16.3" mp3-metadata = "0.4.0" -quick-xml = "0.38.0" +quick-xml = "0.38.3" # Email -lettre = { version = "0.11.17", default-features = false, features = ["tokio1-rustls-tls", "smtp-transport", "builder"] } +lettre = { version = "0.11.18", default-features = false, features = ["tokio1-rustls-tls", "smtp-transport", "builder"] } # CORS and Security -hyper = "1.6.0" +hyper = "1.7.0" # Background Tasks and Task Management -tokio-cron-scheduler = "0.14.0" +tokio-cron-scheduler = "0.15.1" tokio-stream = "0.1.17" futures = "0.3.31" # WebSocket Support (already in axum features) # File handling -tokio-util = { version = "0.7.15", features = ["io"] } +tokio-util = { version = "0.7.16", features = ["io"] } mime_guess = "2.0.5" [dev-dependencies] diff --git a/rust-api/src/config.rs b/rust-api/src/config.rs index 5ad274f3..9fc70f2e 100644 --- a/rust-api/src/config.rs +++ b/rust-api/src/config.rs @@ -9,6 +9,14 @@ pub struct Config { pub server: ServerConfig, pub security: SecurityConfig, pub email: EmailConfig, + pub oidc: OIDCConfig, + pub api: ApiConfig, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ApiConfig { + pub search_api_url: String, + pub people_api_url: String, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -28,6 +36,9 @@ pub struct RedisConfig { pub host: String, pub port: u16, pub max_connections: u32, + pub password: Option, + pub username: Option, + pub database: Option, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -52,37 +63,206 @@ pub struct EmailConfig { pub from_email: Option, } +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct OIDCConfig { + pub disable_standard_login: bool, + pub provider_name: Option, + pub client_id: Option, + pub client_secret: Option, + pub authorization_url: Option, + pub token_url: Option, + pub user_info_url: Option, + pub button_text: Option, + pub scope: Option, + pub button_color: Option, + pub button_text_color: Option, + pub icon_svg: Option, + pub name_claim: Option, + pub 
email_claim: Option, + pub username_claim: Option, + pub roles_claim: Option, + pub user_role: Option, + pub admin_role: Option, +} + +impl OIDCConfig { + pub fn is_configured(&self) -> bool { + self.provider_name.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.client_id.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.client_secret.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.authorization_url.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.token_url.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.user_info_url.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.button_text.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.scope.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.button_color.as_ref().map_or(false, |s| !s.trim().is_empty()) && + self.button_text_color.as_ref().map_or(false, |s| !s.trim().is_empty()) + } + + pub fn validate(&self) -> Result<(), String> { + let required_fields = [ + (&self.provider_name, "OIDC_PROVIDER_NAME"), + (&self.client_id, "OIDC_CLIENT_ID"), + (&self.client_secret, "OIDC_CLIENT_SECRET"), + (&self.authorization_url, "OIDC_AUTHORIZATION_URL"), + (&self.token_url, "OIDC_TOKEN_URL"), + (&self.user_info_url, "OIDC_USER_INFO_URL"), + (&self.button_text, "OIDC_BUTTON_TEXT"), + (&self.scope, "OIDC_SCOPE"), + (&self.button_color, "OIDC_BUTTON_COLOR"), + (&self.button_text_color, "OIDC_BUTTON_TEXT_COLOR"), + ]; + + let missing_fields: Vec<&str> = required_fields + .iter() + .filter_map(|(field, name)| if field.is_none() { Some(*name) } else { None }) + .collect(); + + // Check if any OIDC fields are set + let any_oidc_set = required_fields.iter().any(|(field, _)| field.is_some()); + + if any_oidc_set && !missing_fields.is_empty() { + return Err(format!( + "Incomplete OIDC configuration. When setting up OIDC, all required environment variables must be provided. 
Missing: {}", + missing_fields.join(", ") + )); + } + + if self.disable_standard_login && !self.is_configured() { + return Err("OIDC_DISABLE_STANDARD_LOGIN is set to true, but OIDC is not properly configured. All OIDC environment variables must be set when disabling standard login.".to_string()); + } + + Ok(()) + } +} + impl Config { pub fn new() -> AppResult { // Load environment variables dotenvy::dotenv().ok(); + // Validate required database environment variables + let db_required_vars = [ + ("DB_TYPE", "Database type (e.g., postgresql, mariadb)"), + ("DB_HOST", "Database host (e.g., localhost, db)"), + ("DB_PORT", "Database port (e.g., 5432 for PostgreSQL, 3306 for MariaDB)"), + ("DB_USER", "Database username"), + ("DB_PASSWORD", "Database password"), + ("DB_NAME", "Database name"), + ]; + + let mut missing_db_vars = Vec::new(); + for (var_name, description) in &db_required_vars { + if env::var(var_name).is_err() { + missing_db_vars.push(format!(" {} - {}", var_name, description)); + } + } + + if !missing_db_vars.is_empty() { + return Err(AppError::Config(format!( + "Missing required database environment variables:\n{}\n\nPlease set these variables in your docker-compose.yml or environment.", + missing_db_vars.join("\n") + ))); + } + + // Validate required API URLs + let api_required_vars = [ + ("SEARCH_API_URL", "Search API URL (e.g., https://search.pinepods.online/api/search)"), + ("PEOPLE_API_URL", "People API URL (e.g., https://people.pinepods.online)"), + ]; + + let mut missing_api_vars = Vec::new(); + for (var_name, description) in &api_required_vars { + if env::var(var_name).is_err() { + missing_api_vars.push(format!(" {} - {}", var_name, description)); + } + } + + if !missing_api_vars.is_empty() { + return Err(AppError::Config(format!( + "Missing required API environment variables:\n{}\n\nPlease set these variables in your docker-compose.yml or environment.", + missing_api_vars.join("\n") + ))); + } + + // Validate Valkey/Redis configuration - either 
URL or individual variables (support both VALKEY_* and REDIS_* naming) + let has_valkey_url = env::var("VALKEY_URL").is_ok(); + let has_redis_url = env::var("REDIS_URL").is_ok(); + let has_valkey_vars = env::var("VALKEY_HOST").is_ok() && env::var("VALKEY_PORT").is_ok(); + let has_redis_vars = env::var("REDIS_HOST").is_ok() && env::var("REDIS_PORT").is_ok(); + + if !has_valkey_url && !has_redis_url && !has_valkey_vars && !has_redis_vars { + return Err(AppError::Config(format!( + "Missing required Valkey/Redis configuration. Please provide either:\n Option 1: VALKEY_URL or REDIS_URL - Complete connection URL\n Option 2: VALKEY_HOST/VALKEY_PORT or REDIS_HOST/REDIS_PORT - Individual connection parameters\n\nExample URL: VALKEY_URL=redis://localhost:6379\nExample individual: VALKEY_HOST=localhost, VALKEY_PORT=6379" + ))); + } let database = DatabaseConfig { - db_type: env::var("DB_TYPE").unwrap_or_else(|_| "mariadb".to_string()), - host: env::var("DB_HOST").unwrap_or_else(|_| "127.0.0.1".to_string()), + db_type: env::var("DB_TYPE").unwrap(), + host: env::var("DB_HOST").unwrap(), port: { - let port_str = env::var("DB_PORT").unwrap_or_else(|_| "3306".to_string()); + let port_str = env::var("DB_PORT").unwrap(); port_str.trim().parse() - .map_err(|e| AppError::Config(format!("Invalid DB_PORT '{}': {}", port_str, e)))? + .map_err(|e| AppError::Config(format!("Invalid DB_PORT '{}': Must be a valid port number (e.g., 5432 for PostgreSQL, 3306 for MariaDB)", port_str)))? 
}, - username: env::var("DB_USER").unwrap_or_else(|_| "root".to_string()), - password: env::var("DB_PASSWORD").unwrap_or_else(|_| "password".to_string()), - name: env::var("DB_NAME").unwrap_or_else(|_| "pypods_database".to_string()), + username: env::var("DB_USER").unwrap(), + password: env::var("DB_PASSWORD").unwrap(), + name: env::var("DB_NAME").unwrap(), max_connections: 32, min_connections: 1, }; - let redis = RedisConfig { - host: env::var("VALKEY_HOST").unwrap_or_else(|_| "localhost".to_string()), - port: { - env::var("VALKEY_PORT") - .unwrap_or_else(|_| "6379".to_string()) - .trim() - .parse() - .unwrap_or(6379) - }, - max_connections: 32, + let redis = if let Some(url) = env::var("VALKEY_URL").ok().or_else(|| env::var("REDIS_URL").ok()) { + // Parse VALKEY_URL or REDIS_URL + match url::Url::parse(&url) { + Ok(parsed_url) => { + let host = parsed_url.host_str().unwrap_or("localhost").to_string(); + let port = parsed_url.port().unwrap_or(6379); + let username = if parsed_url.username().is_empty() { + None + } else { + Some(parsed_url.username().to_string()) + }; + let password = parsed_url.password().map(|p| p.to_string()); + let database = if parsed_url.path().len() > 1 { + parsed_url.path().trim_start_matches('/').parse().ok() + } else { + None + }; + + RedisConfig { + host, + port, + max_connections: 32, + password, + username, + database, + } + } + Err(e) => { + return Err(AppError::Config(format!("Invalid URL format: {}", e))); + } + } + } else { + // Use individual variables - support both VALKEY_* and REDIS_* (VALKEY_* takes precedence) + let host = env::var("VALKEY_HOST").or_else(|_| env::var("REDIS_HOST")).unwrap(); + let port_str = env::var("VALKEY_PORT").or_else(|_| env::var("REDIS_PORT")).unwrap(); + let port = port_str.trim().parse() + .map_err(|e| AppError::Config(format!("Invalid port '{}': Must be a valid port number (e.g., 6379)", port_str)))?; + let password = env::var("VALKEY_PASSWORD").ok().or_else(|| env::var("REDIS_PASSWORD").ok()); + 
let username = env::var("VALKEY_USERNAME").ok().or_else(|| env::var("REDIS_USERNAME").ok()); + let database = env::var("VALKEY_DATABASE").ok() + .or_else(|| env::var("REDIS_DATABASE").ok()) + .and_then(|d| d.parse().ok()); + + RedisConfig { + host, + port, + max_connections: 32, + password, + username, + database, + } }; let server = ServerConfig { @@ -104,12 +284,72 @@ impl Config { from_email: env::var("FROM_EMAIL").ok(), }; + // Check if essential OIDC fields are present and non-empty before setting any defaults + let oidc_essentials_present = env::var("OIDC_PROVIDER_NAME").map_or(false, |s| !s.trim().is_empty()) && + env::var("OIDC_CLIENT_ID").map_or(false, |s| !s.trim().is_empty()) && + env::var("OIDC_CLIENT_SECRET").map_or(false, |s| !s.trim().is_empty()) && + env::var("OIDC_AUTHORIZATION_URL").map_or(false, |s| !s.trim().is_empty()) && + env::var("OIDC_TOKEN_URL").map_or(false, |s| !s.trim().is_empty()) && + env::var("OIDC_USER_INFO_URL").map_or(false, |s| !s.trim().is_empty()); + + let oidc = OIDCConfig { + disable_standard_login: env::var("OIDC_DISABLE_STANDARD_LOGIN") + .unwrap_or_else(|_| "false".to_string()) + .parse() + .unwrap_or(false), + provider_name: env::var("OIDC_PROVIDER_NAME").ok(), + client_id: env::var("OIDC_CLIENT_ID").ok(), + client_secret: env::var("OIDC_CLIENT_SECRET").ok(), + authorization_url: env::var("OIDC_AUTHORIZATION_URL").ok(), + token_url: env::var("OIDC_TOKEN_URL").ok(), + user_info_url: env::var("OIDC_USER_INFO_URL").ok(), + button_text: if oidc_essentials_present { + env::var("OIDC_BUTTON_TEXT").ok().or_else(|| Some("Login with OIDC".to_string())) + } else { + env::var("OIDC_BUTTON_TEXT").ok() + }, + scope: if oidc_essentials_present { + env::var("OIDC_SCOPE").ok().or_else(|| Some("openid email profile".to_string())) + } else { + env::var("OIDC_SCOPE").ok() + }, + button_color: if oidc_essentials_present { + env::var("OIDC_BUTTON_COLOR").ok().or_else(|| Some("#000000".to_string())) + } else { + 
env::var("OIDC_BUTTON_COLOR").ok() + }, + button_text_color: if oidc_essentials_present { + env::var("OIDC_BUTTON_TEXT_COLOR").ok().or_else(|| Some("#FFFFFF".to_string())) + } else { + env::var("OIDC_BUTTON_TEXT_COLOR").ok() + }, + icon_svg: env::var("OIDC_ICON_SVG").ok(), + name_claim: env::var("OIDC_NAME_CLAIM").ok(), + email_claim: env::var("OIDC_EMAIL_CLAIM").ok(), + username_claim: env::var("OIDC_USERNAME_CLAIM").ok(), + roles_claim: env::var("OIDC_ROLES_CLAIM").ok(), + user_role: env::var("OIDC_USER_ROLE").ok(), + admin_role: env::var("OIDC_ADMIN_ROLE").ok(), + }; + + let api = ApiConfig { + search_api_url: env::var("SEARCH_API_URL").unwrap(), + people_api_url: env::var("PEOPLE_API_URL").unwrap(), + }; + + // Validate OIDC configuration + if let Err(validation_error) = oidc.validate() { + return Err(AppError::Config(validation_error)); + } + Ok(Config { database, redis, server, security, email, + oidc, + api, }) } @@ -140,6 +380,26 @@ impl Config { } pub fn redis_url(&self) -> String { - format!("redis://{}:{}", self.redis.host, self.redis.port) + let mut url = String::from("redis://"); + + // Add authentication if provided + if let (Some(username), Some(password)) = (&self.redis.username, &self.redis.password) { + url.push_str(&format!("{}:{}@", + urlencoding::encode(username), + urlencoding::encode(password) + )); + } else if let Some(password) = &self.redis.password { + url.push_str(&format!(":{}@", urlencoding::encode(password))); + } + + // Add host and port + url.push_str(&format!("{}:{}", self.redis.host, self.redis.port)); + + // Add database if specified + if let Some(database) = self.redis.database { + url.push_str(&format!("/{}", database)); + } + + url } } \ No newline at end of file diff --git a/rust-api/src/database.rs b/rust-api/src/database.rs index 2add201a..ce965992 100644 --- a/rust-api/src/database.rs +++ b/rust-api/src/database.rs @@ -1,11 +1,13 @@ use sqlx::{MySql, Pool, Postgres, Row}; use std::time::Duration; -use 
crate::{config::Config, error::{AppError, AppResult}}; +use crate::{config::{Config, OIDCConfig}, error::{AppError, AppResult}}; use chrono::{DateTime, Utc}; +use chrono_tz::Tz; use std::collections::HashMap; use bigdecimal::ToPrimitive; use std::sync::{Arc, Mutex}; use lazy_static::lazy_static; +use base64; // Global temporary MFA secrets storage (matches Python temp_mfa_secrets) lazy_static! { @@ -394,7 +396,11 @@ impl DatabasePool { "Episodes".episodetitle as episodetitle, "Episodes".episodepubdate as episodepubdate, "Episodes".episodedescription as episodedescription, - "Episodes".episodeartwork as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl as episodeurl, "Episodes".episodeduration as episodeduration, "UserEpisodeHistory".listenduration as listenduration, @@ -406,6 +412,7 @@ impl DatabasePool { FALSE as is_youtube FROM "Episodes" INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid AND "UserEpisodeHistory".userid = $1 @@ -429,7 +436,11 @@ impl DatabasePool { "YouTubeVideos".videotitle as episodetitle, "YouTubeVideos".publishedat as episodepubdate, "YouTubeVideos".videodescription as episodedescription, - "YouTubeVideos".thumbnailurl as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as episodeartwork, "YouTubeVideos".videourl as episodeurl, "YouTubeVideos".duration as episodeduration, "YouTubeVideos".listenposition as listenduration, @@ -441,6 +452,7 @@ impl 
DatabasePool { TRUE as is_youtube FROM "YouTubeVideos" INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "SavedVideos" ON "YouTubeVideos".videoid = "SavedVideos".videoid AND "SavedVideos".userid = $2 @@ -496,7 +508,11 @@ impl DatabasePool { Episodes.EpisodeTitle as episodetitle, Episodes.EpisodePubDate as episodepubdate, Episodes.EpisodeDescription as episodedescription, - Episodes.EpisodeArtwork as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as episodeartwork, Episodes.EpisodeURL as episodeurl, Episodes.EpisodeDuration as episodeduration, UserEpisodeHistory.ListenDuration as listenduration, @@ -508,6 +524,7 @@ impl DatabasePool { FALSE as is_youtube FROM Episodes INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? 
@@ -531,7 +548,11 @@ impl DatabasePool { YouTubeVideos.VideoTitle as episodetitle, YouTubeVideos.PublishedAt as episodepubdate, YouTubeVideos.VideoDescription as episodedescription, - YouTubeVideos.ThumbnailURL as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE YouTubeVideos.ThumbnailURL + END as episodeartwork, YouTubeVideos.VideoURL as episodeurl, YouTubeVideos.Duration as episodeduration, YouTubeVideos.ListenPosition as listenduration, @@ -543,6 +564,7 @@ impl DatabasePool { TRUE as is_youtube FROM YouTubeVideos INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN SavedVideos ON YouTubeVideos.VideoID = SavedVideos.VideoID AND SavedVideos.UserID = ? @@ -695,7 +717,7 @@ impl DatabasePool { .await?; if let Some(row) = existing { - let podcast_id: i32 = row.try_get("podcastid")?; + let podcast_id: i32 = row.try_get("PodcastID")?; // Check if there are episodes let episode_count = sqlx::query("SELECT COUNT(*) as count FROM Episodes WHERE PodcastID = ?") .bind(podcast_id) @@ -768,6 +790,125 @@ impl DatabasePool { } } + // Add podcast without episodes - for background episode processing + pub async fn add_podcast_without_episodes( + &self, + podcast_values: &crate::handlers::podcasts::PodcastValues, + podcast_index_id: i64, + username: Option<&str>, + password: Option<&str>, + ) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + // Check if podcast already exists + let existing = sqlx::query(r#"SELECT podcastid, podcastname, feedurl FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) + .bind(&podcast_values.pod_feed_url) + .bind(podcast_values.user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = existing { + let podcast_id: i32 = row.try_get("podcastid")?; + return Ok(podcast_id); + } + + // Convert categories to 
string + let category_list = serde_json::to_string(&podcast_values.categories)?; + + // Insert new podcast without episodes + let row = sqlx::query( + r#"INSERT INTO "Podcasts" + (podcastname, artworkurl, author, categories, description, episodecount, + feedurl, websiteurl, explicit, userid, feedcutoffdays, username, password, podcastindexid) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14) + RETURNING podcastid"# + ) + .bind(&podcast_values.pod_title) + .bind(&podcast_values.pod_artwork) + .bind(&podcast_values.pod_author) + .bind(&category_list) + .bind(&podcast_values.pod_description) + .bind(0) // EpisodeCount starts at 0 + .bind(&podcast_values.pod_feed_url) + .bind(&podcast_values.pod_website) + .bind(podcast_values.pod_explicit) + .bind(podcast_values.user_id) + .bind(30) // Default feed cutoff days + .bind(username) + .bind(password) + .bind(podcast_index_id) + .fetch_one(pool) + .await?; + + let podcast_id: i32 = row.try_get("podcastid")?; + + // Update UserStats table + sqlx::query(r#"UPDATE "UserStats" SET podcastsadded = podcastsadded + 1 WHERE userid = $1"#) + .bind(podcast_values.user_id) + .execute(pool) + .await?; + + println!("✅ Added podcast '{}' for user {} (episodes will be processed in background)", + podcast_values.pod_title, podcast_values.user_id); + + Ok(podcast_id) + } + DatabasePool::MySQL(pool) => { + // Check if podcast already exists + let existing = sqlx::query("SELECT PodcastID, PodcastName, FeedURL FROM Podcasts WHERE FeedURL = ? 
AND UserID = ?") + .bind(&podcast_values.pod_feed_url) + .bind(podcast_values.user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = existing { + let podcast_id: i32 = row.try_get("PodcastID")?; + return Ok(podcast_id); + } + + // Convert categories to string + let category_list = serde_json::to_string(&podcast_values.categories)?; + + // Insert new podcast without episodes + let result = sqlx::query( + "INSERT INTO Podcasts + (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, + FeedURL, WebsiteURL, Explicit, UserID, FeedCutoffDays, Username, Password, PodcastIndexID) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + ) + .bind(&podcast_values.pod_title) + .bind(&podcast_values.pod_artwork) + .bind(&podcast_values.pod_author) + .bind(&category_list) + .bind(&podcast_values.pod_description) + .bind(0) // EpisodeCount starts at 0 + .bind(&podcast_values.pod_feed_url) + .bind(&podcast_values.pod_website) + .bind(podcast_values.pod_explicit) + .bind(podcast_values.user_id) + .bind(30) // Default feed cutoff days + .bind(username) + .bind(password) + .bind(podcast_index_id) + .execute(pool) + .await?; + + let podcast_id = result.last_insert_id() as i32; + + // Update UserStats table + sqlx::query("UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = ?") + .bind(podcast_values.user_id) + .execute(pool) + .await?; + + println!("✅ Added podcast '{}' for user {} (episodes will be processed in background)", + podcast_values.pod_title, podcast_values.user_id); + + Ok(podcast_id) + } + } + } + // Remove podcast - matches Python remove_podcast function pub async fn remove_podcast( &self, @@ -1094,7 +1235,7 @@ impl DatabasePool { PodcastName as name, FeedURL as feed_url, ArtworkURL as artwork_url, - IsYoutube as is_youtube, + IsYouTubeChannel as is_youtube, AutoDownload as auto_download, Username as username, Password as password, @@ -1286,7 +1427,7 @@ impl DatabasePool { COALESCE(explicit, false) as explicit, 
COALESCE(podcastindexid, 0) as podcastindexid FROM "Podcasts" - WHERE userid = $1 + WHERE userid = $1 AND COALESCE(displaypodcast, TRUE) = TRUE ORDER BY podcastname"# ) .bind(user_id) @@ -1333,7 +1474,7 @@ impl DatabasePool { COALESCE(Explicit, false) as explicit, COALESCE(PodcastIndexID, 0) as podcastindexid FROM Podcasts - WHERE UserID = ? + WHERE UserID = ? AND COALESCE(DisplayPodcast, 1) = 1 ORDER BY PodcastName" ) .bind(user_id) @@ -1392,7 +1533,7 @@ impl DatabasePool { FROM "Podcasts" p LEFT JOIN "Episodes" e ON p.podcastid = e.podcastid LEFT JOIN "UserEpisodeHistory" ueh ON e.episodeid = ueh.episodeid AND ueh.userid = $1 - WHERE p.userid = $1 + WHERE p.userid = $1 AND COALESCE(p.displaypodcast, TRUE) = TRUE GROUP BY p.podcastid, p.podcastname, p.artworkurl, p.description, p.episodecount, p.websiteurl, p.feedurl, p.author, p.categories, p.explicit, p.podcastindexid, p.isyoutube @@ -1451,14 +1592,14 @@ impl DatabasePool { COUNT(ueh.UserEpisodeHistoryID) as play_count, COUNT(DISTINCT ueh.EpisodeID) as episodes_played, MIN(e.EpisodePubDate) as oldest_episode_date, - COALESCE(p.IsYoutube, false) as is_youtube + COALESCE(p.IsYouTubeChannel, false) as is_youtube FROM Podcasts p LEFT JOIN Episodes e ON p.PodcastID = e.PodcastID LEFT JOIN UserEpisodeHistory ueh ON e.EpisodeID = ueh.EpisodeID AND ueh.UserID = ? - WHERE p.UserID = ? + WHERE p.UserID = ? 
AND COALESCE(p.DisplayPodcast, 1) = 1 GROUP BY p.PodcastID, p.PodcastName, p.ArtworkURL, p.Description, p.EpisodeCount, p.WebsiteURL, p.FeedURL, p.Author, - p.Categories, p.Explicit, p.PodcastIndexID, p.IsYoutube + p.Categories, p.Explicit, p.PodcastIndexID, p.IsYouTubeChannel ORDER BY p.PodcastName" ) .bind(user_id) @@ -1497,6 +1638,216 @@ impl DatabasePool { } } + // Merge podcasts - set DisplayPodcast=FALSE for secondary podcasts and update primary with merged IDs + pub async fn merge_podcasts(&self, primary_podcast_id: i32, secondary_podcast_ids: &[i32], user_id: i32) -> AppResult<()> { + // Validate that all podcasts belong to the user + for &podcast_id in std::iter::once(&primary_podcast_id).chain(secondary_podcast_ids.iter()) { + let exists = self.verify_podcast_belongs_to_user(podcast_id, user_id).await?; + if !exists { + return Err(crate::error::AppError::forbidden("One or more podcasts do not belong to this user")); + } + } + + // Prevent circular merges - check if any secondary podcasts are already primary podcasts with merges + for &secondary_id in secondary_podcast_ids { + let existing_merges = self.get_merged_podcast_ids(secondary_id).await?; + if !existing_merges.is_empty() { + return Err(crate::error::AppError::bad_request("Cannot merge a podcast that is already a primary podcast with merged podcasts")); + } + } + + match self { + DatabasePool::Postgres(pool) => { + let mut tx = pool.begin().await?; + + // Set DisplayPodcast=FALSE for secondary podcasts + for &secondary_id in secondary_podcast_ids { + sqlx::query(r#"UPDATE "Podcasts" SET displaypodcast = FALSE WHERE podcastid = $1"#) + .bind(secondary_id) + .execute(&mut *tx) + .await?; + } + + // Get current merged IDs for primary podcast + let current_merged_ids = self.get_merged_podcast_ids(primary_podcast_id).await?; + let mut all_merged_ids = current_merged_ids; + all_merged_ids.extend_from_slice(secondary_podcast_ids); + + // Update primary podcast with merged IDs + let merged_json = 
serde_json::to_string(&all_merged_ids)?; + sqlx::query(r#"UPDATE "Podcasts" SET mergedpodcastids = $1 WHERE podcastid = $2"#) + .bind(merged_json) + .bind(primary_podcast_id) + .execute(&mut *tx) + .await?; + + tx.commit().await?; + } + DatabasePool::MySQL(pool) => { + let mut tx = pool.begin().await?; + + // Set DisplayPodcast=0 for secondary podcasts + for &secondary_id in secondary_podcast_ids { + sqlx::query("UPDATE Podcasts SET DisplayPodcast = 0 WHERE PodcastID = ?") + .bind(secondary_id) + .execute(&mut *tx) + .await?; + } + + // Get current merged IDs for primary podcast + let current_merged_ids = self.get_merged_podcast_ids(primary_podcast_id).await?; + let mut all_merged_ids = current_merged_ids; + all_merged_ids.extend_from_slice(secondary_podcast_ids); + + // Update primary podcast with merged IDs + let merged_json = serde_json::to_string(&all_merged_ids)?; + sqlx::query("UPDATE Podcasts SET MergedPodcastIDs = ? WHERE PodcastID = ?") + .bind(merged_json) + .bind(primary_podcast_id) + .execute(&mut *tx) + .await?; + + tx.commit().await?; + } + } + Ok(()) + } + + // Unmerge a specific podcast from a primary podcast + pub async fn unmerge_podcast(&self, primary_podcast_id: i32, target_podcast_id: i32, user_id: i32) -> AppResult<()> { + // Validate ownership + let primary_exists = self.verify_podcast_belongs_to_user(primary_podcast_id, user_id).await?; + let target_exists = self.verify_podcast_belongs_to_user(target_podcast_id, user_id).await?; + + if !primary_exists || !target_exists { + return Err(crate::error::AppError::forbidden("One or more podcasts do not belong to this user")); + } + + match self { + DatabasePool::Postgres(pool) => { + let mut tx = pool.begin().await?; + + // Get current merged IDs and remove the target + let mut merged_ids = self.get_merged_podcast_ids(primary_podcast_id).await?; + merged_ids.retain(|&id| id != target_podcast_id); + + // Update primary podcast + let merged_json = if merged_ids.is_empty() { + None + } else { + 
Some(serde_json::to_string(&merged_ids)?) + }; + + sqlx::query(r#"UPDATE "Podcasts" SET mergedpodcastids = $1 WHERE podcastid = $2"#) + .bind(merged_json) + .bind(primary_podcast_id) + .execute(&mut *tx) + .await?; + + // Set DisplayPodcast=TRUE for the unmerged podcast + sqlx::query(r#"UPDATE "Podcasts" SET displaypodcast = TRUE WHERE podcastid = $1"#) + .bind(target_podcast_id) + .execute(&mut *tx) + .await?; + + tx.commit().await?; + } + DatabasePool::MySQL(pool) => { + let mut tx = pool.begin().await?; + + // Get current merged IDs and remove the target + let mut merged_ids = self.get_merged_podcast_ids(primary_podcast_id).await?; + merged_ids.retain(|&id| id != target_podcast_id); + + // Update primary podcast + let merged_json = if merged_ids.is_empty() { + None + } else { + Some(serde_json::to_string(&merged_ids)?) + }; + + sqlx::query("UPDATE Podcasts SET MergedPodcastIDs = ? WHERE PodcastID = ?") + .bind(merged_json) + .bind(primary_podcast_id) + .execute(&mut *tx) + .await?; + + // Set DisplayPodcast=1 for the unmerged podcast + sqlx::query("UPDATE Podcasts SET DisplayPodcast = 1 WHERE PodcastID = ?") + .bind(target_podcast_id) + .execute(&mut *tx) + .await?; + + tx.commit().await?; + } + } + Ok(()) + } + + // Get merged podcast IDs for a primary podcast + pub async fn get_merged_podcast_ids(&self, podcast_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT mergedpodcastids FROM "Podcasts" WHERE podcastid = $1"#) + .bind(podcast_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + if let Some(merged_json) = row.try_get::, _>("mergedpodcastids")? 
{ + let merged_ids: Vec = serde_json::from_str(&merged_json).unwrap_or_default(); + Ok(merged_ids) + } else { + Ok(Vec::new()) + } + } else { + Ok(Vec::new()) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT MergedPodcastIDs FROM Podcasts WHERE PodcastID = ?") + .bind(podcast_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + if let Some(merged_json) = row.try_get::, _>("MergedPodcastIDs")? { + let merged_ids: Vec = serde_json::from_str(&merged_json).unwrap_or_default(); + Ok(merged_ids) + } else { + Ok(Vec::new()) + } + } else { + Ok(Vec::new()) + } + } + } + } + + // Helper function to verify a podcast belongs to a user + pub async fn verify_podcast_belongs_to_user(&self, podcast_id: i32, user_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let count = sqlx::query(r#"SELECT COUNT(*) as count FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) + .bind(podcast_id) + .bind(user_id) + .fetch_one(pool) + .await?; + + Ok(count.try_get::("count")? > 0) + } + DatabasePool::MySQL(pool) => { + let count = sqlx::query("SELECT COUNT(*) as count FROM Podcasts WHERE PodcastID = ? AND UserID = ?") + .bind(podcast_id) + .bind(user_id) + .fetch_one(pool) + .await?; + + Ok(count.try_get::("count")? 
> 0) + } + } + } + // Get time info for user - matches Python get_time_info function pub async fn get_time_info(&self, user_id: i32) -> AppResult { match self { @@ -1796,7 +2147,11 @@ impl DatabasePool { "Podcasts".podcastname as podcastname, "Episodes".episodepubdate as episodepubdate, "Episodes".episodedescription as episodedescription, - "Episodes".episodeartwork as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl as episodeurl, "EpisodeQueue".queueposition as queueposition, "Episodes".episodeduration as episodeduration, @@ -1811,6 +2166,7 @@ impl DatabasePool { FROM "EpisodeQueue" INNER JOIN "Episodes" ON "EpisodeQueue".episodeid = "Episodes".episodeid INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "EpisodeQueue".episodeid = "UserEpisodeHistory".episodeid AND "EpisodeQueue".userid = "UserEpisodeHistory".userid @@ -1829,7 +2185,11 @@ impl DatabasePool { "Podcasts".podcastname as podcastname, "YouTubeVideos".publishedat as episodepubdate, "YouTubeVideos".videodescription as episodedescription, - "YouTubeVideos".thumbnailurl as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as episodeartwork, "YouTubeVideos".videourl as episodeurl, "EpisodeQueue".queueposition as queueposition, "YouTubeVideos".duration as episodeduration, @@ -1844,6 +2204,7 @@ impl DatabasePool { FROM "EpisodeQueue" INNER JOIN "YouTubeVideos" ON "EpisodeQueue".episodeid = "YouTubeVideos".videoid INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = 
"Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "SavedVideos" ON "EpisodeQueue".episodeid = "SavedVideos".videoid AND "EpisodeQueue".userid = "SavedVideos".userid @@ -1896,7 +2257,11 @@ impl DatabasePool { Podcasts.PodcastName as podcastname, Episodes.EpisodePubDate as episodepubdate, Episodes.EpisodeDescription as episodedescription, - Episodes.EpisodeArtwork as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = TRUE AND Podcasts.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as episodeartwork, Episodes.EpisodeURL as episodeurl, EpisodeQueue.QueuePosition as queueposition, Episodes.EpisodeDuration as episodeduration, @@ -1911,6 +2276,7 @@ impl DatabasePool { FROM EpisodeQueue INNER JOIN Episodes ON EpisodeQueue.EpisodeID = Episodes.EpisodeID INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON EpisodeQueue.EpisodeID = UserEpisodeHistory.EpisodeID AND EpisodeQueue.UserID = UserEpisodeHistory.UserID @@ -1929,7 +2295,11 @@ impl DatabasePool { Podcasts.PodcastName as podcastname, YouTubeVideos.PublishedAt as episodepubdate, YouTubeVideos.VideoDescription as episodedescription, - YouTubeVideos.ThumbnailURL as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = TRUE AND Podcasts.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + ELSE YouTubeVideos.ThumbnailURL + END as episodeartwork, YouTubeVideos.VideoURL as episodeurl, EpisodeQueue.QueuePosition as queueposition, YouTubeVideos.Duration as episodeduration, @@ -1944,6 +2314,7 @@ impl DatabasePool { FROM EpisodeQueue INNER JOIN YouTubeVideos ON EpisodeQueue.EpisodeID = YouTubeVideos.VideoID INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = 
Users.UserID LEFT JOIN SavedVideos ON EpisodeQueue.EpisodeID = SavedVideos.VideoID AND EpisodeQueue.UserID = SavedVideos.UserID @@ -1965,8 +2336,8 @@ impl DatabasePool { episodetitle: row.try_get("episodetitle")?, podcastname: row.try_get("podcastname")?, episodepubdate: { - let naive = row.try_get::("episodepubdate")?; - naive.format("%Y-%m-%dT%H:%M:%S").to_string() + let dt = row.try_get::, _>("episodepubdate")?; + dt.format("%Y-%m-%dT%H:%M:%S").to_string() }, episodedescription: row.try_get("episodedescription")?, episodeartwork: row.try_get("episodeartwork")?, @@ -1974,8 +2345,8 @@ impl DatabasePool { queueposition: row.try_get("queueposition").ok(), episodeduration: row.try_get("episodeduration")?, queuedate: { - let naive = row.try_get::("queuedate")?; - naive.format("%Y-%m-%dT%H:%M:%S").to_string() + let dt = row.try_get::, _>("queuedate")?; + dt.format("%Y-%m-%dT%H:%M:%S").to_string() }, listenduration: row.try_get("listenduration").ok(), episodeid: row.try_get("episodeid")?, @@ -2227,25 +2598,61 @@ impl DatabasePool { } } - // Mark episode as completed - matches Python mark_episode_completed function - pub async fn mark_episode_completed(&self, episode_id: i32, user_id: i32, is_youtube: bool) -> AppResult<()> { + pub async fn update_episode_duration(&self, episode_id: i32, new_duration: i32, is_youtube: bool) -> AppResult<()> { match self { DatabasePool::Postgres(pool) => { if is_youtube { - // Get YouTube video duration - let duration_row = sqlx::query( - r#"SELECT duration FROM "YouTubeVideos" WHERE videoid = $1"# - ) - .bind(episode_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = duration_row { - let duration: Option = row.try_get("duration").ok(); - - if let Some(duration) = duration { - // Update completion status - sqlx::query( + sqlx::query(r#"UPDATE "YouTubeVideos" SET duration = $1 WHERE videoid = $2"#) + .bind(new_duration) + .bind(episode_id) + .execute(pool) + .await?; + } else { + sqlx::query(r#"UPDATE "Episodes" SET 
episodeduration = $1 WHERE episodeid = $2"#) + .bind(new_duration) + .bind(episode_id) + .execute(pool) + .await?; + } + } + DatabasePool::MySQL(pool) => { + if is_youtube { + sqlx::query(r#"UPDATE YouTubeVideos SET duration = ? WHERE videoid = ?"#) + .bind(new_duration) + .bind(episode_id) + .execute(pool) + .await?; + } else { + sqlx::query(r#"UPDATE Episodes SET episodeduration = ? WHERE episodeid = ?"#) + .bind(new_duration) + .bind(episode_id) + .execute(pool) + .await?; + } + } + } + Ok(()) + } + + // Mark episode as completed - matches Python mark_episode_completed function + pub async fn mark_episode_completed(&self, episode_id: i32, user_id: i32, is_youtube: bool) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + if is_youtube { + // Get YouTube video duration + let duration_row = sqlx::query( + r#"SELECT duration FROM "YouTubeVideos" WHERE videoid = $1"# + ) + .bind(episode_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = duration_row { + let duration: Option = row.try_get("duration").ok(); + + if let Some(duration) = duration { + // Update completion status + sqlx::query( r#"UPDATE "YouTubeVideos" SET completed = TRUE WHERE videoid = $1"# ) .bind(episode_id) @@ -2472,7 +2879,7 @@ impl DatabasePool { .await?; if let Some(row) = row { - return Ok(Some(row.try_get("podcastid")?)); + return Ok(Some(row.try_get("PodcastID")?)); } // If not found, try with system user (1) @@ -2483,7 +2890,7 @@ impl DatabasePool { .await?; if let Some(row) = row { - Ok(Some(row.try_get("podcastid")?)) + Ok(Some(row.try_get("PodcastID")?)) } else { Ok(None) } @@ -2491,6 +2898,11 @@ impl DatabasePool { } } + // Get episode ID from episode URL - public version of find_episode_by_url + pub async fn get_episode_id_from_url(&self, episode_url: &str, user_id: i32) -> AppResult> { + self.find_episode_by_url(user_id, episode_url).await + } + // Get PinePods version - matches Python get_pinepods_version function pub async fn get_pinepods_version(&self) 
-> AppResult { match std::fs::read_to_string("/pinepods/current_version") { @@ -2519,16 +2931,6 @@ impl DatabasePool { .await?; if let Some(row) = row { - // Get additional stats from Episodes and Podcasts - let episode_count_row = sqlx::query( - r#"SELECT COUNT(*) as total_episodes FROM "Episodes" e - INNER JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE p.userid = $1"# - ) - .bind(user_id) - .fetch_one(pool) - .await?; - // Count saved episodes directly from SavedEpisodes table let saved_count_row = sqlx::query( r#"SELECT COUNT(*) as saved_count FROM "SavedEpisodes" WHERE userid = $1"# @@ -2545,7 +2947,6 @@ impl DatabasePool { .fetch_one(pool) .await?; - let total_episodes: i64 = episode_count_row.try_get("total_episodes")?; let saved_count: i64 = saved_count_row.try_get("saved_count")?; let downloaded_count: i64 = downloaded_count_row.try_get("downloaded_count")?; @@ -2575,15 +2976,6 @@ impl DatabasePool { .await?; if let Some(row) = row { - // Get additional stats from Episodes and Podcasts - let episode_count_row = sqlx::query( - "SELECT COUNT(*) as total_episodes FROM Episodes e - INNER JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE p.UserID = ?" 
- ) - .bind(user_id) - .fetch_one(pool) - .await?; // Count saved episodes directly from SavedEpisodes table let saved_count_row = sqlx::query( @@ -2601,12 +2993,11 @@ impl DatabasePool { .fetch_one(pool) .await?; - let total_episodes: i64 = episode_count_row.try_get("total_episodes")?; let saved_count: i64 = saved_count_row.try_get("saved_count")?; let downloaded_count: i64 = downloaded_count_row.try_get("downloaded_count")?; let stats = serde_json::json!({ - "UserCreated": row.try_get::("UserCreated")?.format("%Y-%m-%dT%H:%M:%S%.f").to_string(), + "UserCreated": row.try_get::, _>("UserCreated")?.format("%Y-%m-%dT%H:%M:%S%.f").to_string(), "PodcastsPlayed": row.try_get::("PodcastsPlayed")?, "TimeListened": row.try_get::("TimeListened")?, "PodcastsAdded": row.try_get::("PodcastsAdded")?, @@ -2646,7 +3037,11 @@ impl DatabasePool { e.episodetitle, e.episodedescription, e.episodeurl, - e.episodeartwork, + CASE + WHEN p.usepodcastcoverscustomized = TRUE AND p.usepodcastcovers = TRUE THEN p.artworkurl + WHEN u.usepodcastcovers = TRUE THEN p.artworkurl + ELSE e.episodeartwork + END as episodeartwork, e.episodepubdate, e.episodeduration, COALESCE(h.listenduration, 0) as listenduration, @@ -2655,6 +3050,7 @@ impl DatabasePool { CASE WHEN eq.episodeid IS NOT NULL THEN true ELSE false END as queued, CASE WHEN de.episodeid IS NOT NULL THEN true ELSE false END as downloaded FROM "Podcasts" p + LEFT JOIN "Users" u ON p.userid = u.userid LEFT JOIN "Episodes" e ON p.podcastid = e.podcastid LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $2 LEFT JOIN "SavedEpisodes" se ON e.episodeid = se.episodeid AND se.userid = $2 @@ -2685,7 +3081,7 @@ impl DatabasePool { let result = serde_json::json!({ "podcastid": row.try_get::("podcastid").unwrap_or(0), "podcastname": row.try_get::("podcastname").unwrap_or_default(), - "artworkurl": row.try_get::("artworkurl").unwrap_or_default(), + "artworkurl": row.try_get::, 
_>("artworkurl").unwrap_or_default().unwrap_or_default(), "author": row.try_get::("author").unwrap_or_default(), "categories": categories_value, "description": row.try_get::("description").unwrap_or_default(), @@ -2731,7 +3127,11 @@ impl DatabasePool { e.EpisodeTitle as episodetitle, e.EpisodeDescription as episodedescription, e.EpisodeURL as episodeurl, - e.EpisodeArtwork as episodeartwork, + CASE + WHEN p.UsePodcastCoversCustomized = TRUE AND p.UsePodcastCovers = TRUE THEN p.ArtworkURL + WHEN u.UsePodcastCovers = TRUE THEN p.ArtworkURL + ELSE e.EpisodeArtwork + END as episodeartwork, e.EpisodePubDate as episodepubdate, e.EpisodeDuration as episodeduration, COALESCE(h.ListenDuration, 0) as listenduration, @@ -2740,6 +3140,7 @@ impl DatabasePool { CASE WHEN eq.EpisodeID IS NOT NULL THEN true ELSE false END as queued, CASE WHEN de.EpisodeID IS NOT NULL THEN true ELSE false END as downloaded FROM Podcasts p + LEFT JOIN Users u ON p.UserID = u.UserID LEFT JOIN Episodes e ON p.PodcastID = e.PodcastID LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? LEFT JOIN SavedEpisodes se ON e.EpisodeID = se.EpisodeID AND se.UserID = ? 
@@ -2776,7 +3177,7 @@ impl DatabasePool { let result = serde_json::json!({ "podcastid": row.try_get::("podcastid").unwrap_or(0), "podcastname": row.try_get::("podcastname").unwrap_or_default(), - "artworkurl": row.try_get::("artworkurl").unwrap_or_default(), + "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default(), "author": row.try_get::("author").unwrap_or_default(), "categories": categories_value, "description": row.try_get::("description").unwrap_or_default(), @@ -2826,7 +3227,11 @@ impl DatabasePool { "Episodes".episodetitle, "Episodes".episodepubdate, "Episodes".episodedescription, - "Episodes".episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl, "Episodes".episodeduration, "Episodes".completed, @@ -2839,6 +3244,7 @@ impl DatabasePool { CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded FROM "Episodes" INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid AND "UserEpisodeHistory".userid = $1 @@ -2912,7 +3318,11 @@ impl DatabasePool { "Episodes".episodetitle, "Episodes".episodepubdate, "Episodes".episodedescription, - "Episodes".episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl, "Episodes".episodeduration, "Episodes".completed, @@ -2926,6 +3336,7 @@ impl DatabasePool { FROM "UserEpisodeHistory" JOIN "Episodes" ON "UserEpisodeHistory".episodeid = "Episodes".episodeid 
JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "SavedEpisodes" ON "Episodes".episodeid = "SavedEpisodes".episodeid AND "SavedEpisodes".userid = $1 @@ -3095,7 +3506,11 @@ impl DatabasePool { Episodes.EpisodeTitle, Episodes.EpisodePubDate, Episodes.EpisodeDescription, - Episodes.EpisodeArtwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration, Episodes.Completed, @@ -3108,6 +3523,7 @@ impl DatabasePool { CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded FROM Episodes INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? @@ -3147,7 +3563,7 @@ impl DatabasePool { let episodeduration: i32 = row.try_get("EpisodeDuration")?; let completed: bool = row.try_get::("Completed")? != 0; let podcastname: String = row.try_get("PodcastName")?; - let podcastid: i32 = row.try_get("podcastid")?; + let podcastid: i32 = row.try_get("PodcastID")?; let is_youtube: bool = row.try_get::("is_youtube")? != 0; let listenduration: Option = row.try_get("ListenDuration")?; let saved: bool = row.try_get::("saved")? 
!= 0; @@ -3181,7 +3597,11 @@ impl DatabasePool { Episodes.EpisodeTitle, Episodes.EpisodePubDate, Episodes.EpisodeDescription, - Episodes.EpisodeArtwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration, Episodes.Completed, @@ -3195,6 +3615,7 @@ impl DatabasePool { FROM UserEpisodeHistory JOIN Episodes ON UserEpisodeHistory.EpisodeID = Episodes.EpisodeID JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN SavedEpisodes ON Episodes.EpisodeID = SavedEpisodes.EpisodeID AND SavedEpisodes.UserID = ? @@ -3231,7 +3652,7 @@ impl DatabasePool { let episodeduration: i32 = row.try_get("EpisodeDuration")?; let completed: bool = row.try_get::("Completed")? != 0; let podcastname: String = row.try_get("PodcastName")?; - let podcastid: i32 = row.try_get("podcastid")?; + let podcastid: i32 = row.try_get("PodcastID")?; let is_youtube: bool = row.try_get::("is_youtube")? != 0; let listenduration: Option = row.try_get("ListenDuration")?; let saved: bool = row.try_get::("saved")? 
!= 0; @@ -3355,13 +3776,6 @@ impl DatabasePool { match self { DatabasePool::Postgres(pool) => { let query = r#" - WITH filtered_episodes AS ( - SELECT pc.playlistid, pc.episodeid - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.episodeid = e.episodeid - JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE p.userid = $1 - ) SELECT p.playlistid, p.userid, @@ -3380,17 +3794,13 @@ impl DatabasePool { p.lastupdated, p.created, p.iconname, - COUNT(fe.episodeid)::INTEGER as episode_count + COALESCE(p.episodecount, 0) as episode_count FROM "Playlists" p - LEFT JOIN filtered_episodes fe ON p.playlistid = fe.playlistid - WHERE p.issystemplaylist = TRUE - OR p.userid = $2 - GROUP BY p.playlistid + WHERE p.userid = $1 ORDER BY p.issystemplaylist DESC, p.name ASC "#; let rows = sqlx::query(query) - .bind(user_id) .bind(user_id) .fetch_all(pool) .await?; @@ -3457,13 +3867,6 @@ impl DatabasePool { } DatabasePool::MySQL(pool) => { let query = r#" - WITH filtered_episodes AS ( - SELECT pc.PlaylistID, pc.EpisodeID - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE p.UserID = ? - ) SELECT p.PlaylistID, p.UserID, @@ -3482,17 +3885,13 @@ impl DatabasePool { p.LastUpdated, p.Created, p.IconName, - COUNT(fe.EpisodeID) as episode_count + COALESCE(p.EpisodeCount, 0) as episode_count FROM Playlists p - LEFT JOIN filtered_episodes fe ON p.PlaylistID = fe.PlaylistID - WHERE p.IsSystemPlaylist = TRUE - OR p.UserID = ? - GROUP BY p.PlaylistID + WHERE p.UserID = ? ORDER BY p.IsSystemPlaylist DESC, p.Name ASC "#; let rows = sqlx::query(query) - .bind(user_id) .bind(user_id) .fetch_all(pool) .await?; @@ -3567,8 +3966,8 @@ impl DatabasePool { "sort_order": row.try_get::, _>("SortOrder")?, "group_by_podcast": row.try_get::("GroupByPodcast")? 
!= 0, "max_episodes": row.try_get::, _>("MaxEpisodes")?, - "last_updated": row.try_get::, _>("LastUpdated")?.map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()).unwrap_or_default(), - "created": row.try_get::, _>("Created")?.map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()).unwrap_or_default(), + "last_updated": row.try_get::>, _>("LastUpdated")?.map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()).unwrap_or_default(), + "created": row.try_get::>, _>("Created")?.map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()).unwrap_or_default(), "icon_name": row.try_get::, _>("IconName")?.unwrap_or_default(), "episode_count": row.try_get::("episode_count")?, "preview_episodes": preview_episodes @@ -3582,6 +3981,127 @@ impl DatabasePool { } } + // Create playlist - matches Python create_playlist function exactly + pub async fn create_playlist(&self, _config: &Config, playlist_data: &crate::models::CreatePlaylistRequest) -> AppResult { + let min_duration = playlist_data.min_duration.map(|d| d * 60); + let max_duration = playlist_data.max_duration.map(|d| d * 60); + + match self { + DatabasePool::Postgres(pool) => { + let podcast_ids_array = if let Some(ref ids) = playlist_data.podcast_ids { + ids.clone() + } else { + vec![] + }; + + let result = sqlx::query(r#" + INSERT INTO "Playlists" ( + userid, + name, + description, + issystemplaylist, + podcastids, + includeunplayed, + includepartiallyplayed, + includeplayed, + minduration, + maxduration, + sortorder, + groupbypodcast, + maxepisodes, + iconname, + playprogressmin, + playprogressmax, + timefilterhours + ) VALUES ( + $1, $2, $3, FALSE, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16 + ) RETURNING playlistid + "#) + .bind(playlist_data.user_id) + .bind(&playlist_data.name) + .bind(&playlist_data.description) + .bind(&podcast_ids_array) + .bind(playlist_data.include_unplayed) + .bind(playlist_data.include_partially_played) + .bind(playlist_data.include_played) + .bind(min_duration) + .bind(max_duration) + 
.bind(&playlist_data.sort_order) + .bind(playlist_data.group_by_podcast) + .bind(playlist_data.max_episodes) + .bind(&playlist_data.icon_name) + .bind(playlist_data.play_progress_min) + .bind(playlist_data.play_progress_max) + .bind(playlist_data.time_filter_hours) + .fetch_one(pool) + .await?; + + let playlist_id = result.get::("playlistid"); + + // Update playlist contents immediately like Python does + self.update_playlist_contents(playlist_id).await?; + + Ok(playlist_id) + } + DatabasePool::MySQL(pool) => { + let podcast_ids_json = if let Some(ref ids) = playlist_data.podcast_ids { + serde_json::to_string(ids)? + } else { + "[]".to_string() + }; + + let result = sqlx::query(r#" + INSERT INTO Playlists ( + UserID, + Name, + Description, + IsSystemPlaylist, + PodcastIDs, + IncludeUnplayed, + IncludePartiallyPlayed, + IncludePlayed, + MinDuration, + MaxDuration, + SortOrder, + GroupByPodcast, + MaxEpisodes, + IconName, + PlayProgressMin, + PlayProgressMax, + TimeFilterHours + ) VALUES ( + ?, ?, ?, FALSE, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
+ ) + "#) + .bind(playlist_data.user_id) + .bind(&playlist_data.name) + .bind(&playlist_data.description) + .bind(&podcast_ids_json) + .bind(playlist_data.include_unplayed) + .bind(playlist_data.include_partially_played) + .bind(playlist_data.include_played) + .bind(min_duration) + .bind(max_duration) + .bind(&playlist_data.sort_order) + .bind(playlist_data.group_by_podcast) + .bind(playlist_data.max_episodes) + .bind(&playlist_data.icon_name) + .bind(playlist_data.play_progress_min) + .bind(playlist_data.play_progress_max) + .bind(playlist_data.time_filter_hours) + .execute(pool) + .await?; + + let playlist_id = result.last_insert_id() as i32; + + // Update playlist contents immediately like Python does + self.update_playlist_contents(playlist_id).await?; + + Ok(playlist_id) + } + } + } + // Mark episode as uncompleted - matches Python mark_episode_uncompleted function pub async fn mark_episode_uncompleted(&self, episode_id: i32, user_id: i32, is_youtube: bool) -> AppResult<()> { match self { @@ -3664,7 +4184,11 @@ impl DatabasePool { "Episodes".episodepubdate as episodepubdate, "Episodes".episodedescription as episodedescription, "Episodes".episodeid as episodeid, - "Episodes".episodeartwork as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl as episodeurl, "Episodes".episodeduration as episodeduration, "Podcasts".websiteurl as websiteurl, @@ -3673,10 +4197,12 @@ impl DatabasePool { TRUE as saved, CASE WHEN "EpisodeQueue".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS queued, CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - FALSE as is_youtube + FALSE as is_youtube, + "Podcasts".podcastid as podcastid FROM "SavedEpisodes" INNER JOIN "Episodes" ON "SavedEpisodes".episodeid = 
"Episodes".episodeid INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "SavedEpisodes".episodeid = "UserEpisodeHistory".episodeid AND "UserEpisodeHistory".userid = $1 @@ -3697,7 +4223,11 @@ impl DatabasePool { "YouTubeVideos".publishedat as episodepubdate, "YouTubeVideos".videodescription as episodedescription, "YouTubeVideos".videoid as episodeid, - "YouTubeVideos".thumbnailurl as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as episodeartwork, "YouTubeVideos".videourl as episodeurl, "YouTubeVideos".duration as episodeduration, "Podcasts".websiteurl as websiteurl, @@ -3706,10 +4236,12 @@ impl DatabasePool { TRUE as saved, CASE WHEN "EpisodeQueue".episodeid IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued, CASE WHEN "DownloadedVideos".videoid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - TRUE as is_youtube + TRUE as is_youtube, + "Podcasts".podcastid as podcastid FROM "SavedVideos" INNER JOIN "YouTubeVideos" ON "SavedVideos".videoid = "YouTubeVideos".videoid INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "EpisodeQueue" ON "SavedVideos".videoid = "EpisodeQueue".episodeid AND "EpisodeQueue".userid = $5 @@ -3752,6 +4284,7 @@ impl DatabasePool { queued: row.try_get("queued")?, downloaded: row.try_get("downloaded")?, is_youtube: row.try_get("is_youtube")?, + podcastid: row.try_get("podcastid").ok(), }); } Ok(episodes) @@ -3765,7 +4298,11 @@ impl DatabasePool { Episodes.EpisodePubDate as episodepubdate, Episodes.EpisodeDescription as episodedescription, Episodes.EpisodeID as episodeid, - Episodes.EpisodeArtwork as 
episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as episodeartwork, Episodes.EpisodeURL as episodeurl, Episodes.EpisodeDuration as episodeduration, Podcasts.WebsiteURL as websiteurl, @@ -3774,10 +4311,12 @@ impl DatabasePool { TRUE as saved, CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - FALSE as is_youtube + FALSE as is_youtube, + Podcasts.PodcastID as podcastid FROM SavedEpisodes INNER JOIN Episodes ON SavedEpisodes.EpisodeID = Episodes.EpisodeID INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON SavedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? @@ -3798,7 +4337,11 @@ impl DatabasePool { YouTubeVideos.PublishedAt as episodepubdate, YouTubeVideos.VideoDescription as episodedescription, YouTubeVideos.VideoID as episodeid, - YouTubeVideos.ThumbnailURL as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE YouTubeVideos.ThumbnailURL + END as episodeartwork, YouTubeVideos.VideoURL as episodeurl, YouTubeVideos.Duration as episodeduration, Podcasts.WebsiteURL as websiteurl, @@ -3807,10 +4350,12 @@ impl DatabasePool { TRUE as saved, CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued, CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - TRUE as is_youtube + TRUE as is_youtube, + Podcasts.PodcastID as podcastid FROM SavedVideos INNER JOIN YouTubeVideos ON SavedVideos.VideoID = YouTubeVideos.VideoID INNER 
JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN EpisodeQueue ON SavedVideos.VideoID = EpisodeQueue.EpisodeID AND EpisodeQueue.UserID = ? @@ -3853,6 +4398,7 @@ impl DatabasePool { queued: row.try_get("queued")?, downloaded: row.try_get("downloaded")?, is_youtube: row.try_get("is_youtube")?, + podcastid: row.try_get("podcastid").ok(), }); } Ok(episodes) @@ -4310,10 +4856,10 @@ impl DatabasePool { fn generate_api_key(&self) -> String { use rand::Rng; const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); (0..64) .map(|_| { - let idx = rng.gen_range(0..CHARSET.len()); + let idx = rng.random_range(0..CHARSET.len()); CHARSET[idx] as char }) .collect() @@ -4382,44 +4928,85 @@ impl DatabasePool { podcast_id: i32, feed_url: &str, artwork_url: &str, - auto_download: bool, + _auto_download: bool, username: Option<&str>, password: Option<&str>, ) -> AppResult> { // Fetch the RSS feed let content = self.try_fetch_feed(feed_url, username, password).await?; - // Parse the RSS feed - let episodes = self.parse_rss_feed(&content, podcast_id, artwork_url).await?; + // Parse the RSS feed - enable duration estimation for initial podcast adding + let episodes = self.parse_rss_feed_with_options(&content, podcast_id, artwork_url, true).await?; let mut first_episode_id = None; for episode in episodes { - // Check if episode already exists - let exists = match self { + // Check if episode already exists by EITHER title OR url + // This handles cases where feed maintainers edit episodes + let existing_episode_id = match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE podcastid = $1 AND episodetitle = $2"#) + let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE podcastid = $1 AND (episodetitle = $2 OR episodeurl = $3)"#) .bind(podcast_id) 
.bind(&episode.title) + .bind(&episode.url) .fetch_optional(pool) .await?; - row.is_some() + row.map(|r| r.try_get::("episodeid").ok()).flatten() } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE PodcastID = ? AND EpisodeTitle = ?") + let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE PodcastID = ? AND (EpisodeTitle = ? OR EpisodeURL = ?)") .bind(podcast_id) .bind(&episode.title) + .bind(&episode.url) .fetch_optional(pool) .await?; - row.is_some() + row.map(|r| r.try_get::("EpisodeID").ok()).flatten() } }; - - if exists { + + if let Some(episode_id) = existing_episode_id { + // Episode already exists (by title or URL) - UPDATE it with new metadata + match self { + DatabasePool::Postgres(pool) => { + sqlx::query( + r#"UPDATE "Episodes" + SET episodetitle = $1, episodedescription = $2, episodeurl = $3, + episodeartwork = $4, episodepubdate = $5, episodeduration = $6 + WHERE episodeid = $7"# + ) + .bind(&episode.title) + .bind(&episode.description) + .bind(&episode.url) + .bind(&episode.artwork_url) + .bind(&episode.pub_date) + .bind(episode.duration) + .bind(episode_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query( + "UPDATE Episodes + SET EpisodeTitle = ?, EpisodeDescription = ?, EpisodeURL = ?, + EpisodeArtwork = ?, EpisodePubDate = ?, EpisodeDuration = ? + WHERE EpisodeID = ?" 
+ ) + .bind(&episode.title) + .bind(&episode.description) + .bind(&episode.url) + .bind(&episode.artwork_url) + .bind(&episode.pub_date) + .bind(episode.duration) + .bind(episode_id) + .execute(pool) + .await?; + } + } + // Skip to next episode - don't insert or send notification for updates continue; } - - // Insert new episode + + // Insert new episode (neither title nor URL exists) let episode_id = match self { DatabasePool::Postgres(pool) => { let row = sqlx::query( @@ -4498,29 +5085,85 @@ impl DatabasePool { let mut new_episodes = Vec::new(); - for episode in episodes { - // Check if episode already exists - let exists = match self { + for mut episode in episodes { + // Check if episode already exists by EITHER title OR url + // This handles cases where feed maintainers edit episodes + let existing_episode_id = match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE podcastid = $1 AND episodetitle = $2"#) + let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE podcastid = $1 AND (episodetitle = $2 OR episodeurl = $3)"#) .bind(podcast_id) .bind(&episode.title) + .bind(&episode.url) .fetch_optional(pool) .await?; - row.is_some() + row.map(|r| r.try_get::("episodeid").ok()).flatten() } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE PodcastID = ? AND EpisodeTitle = ?") + let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE PodcastID = ? AND (EpisodeTitle = ? 
OR EpisodeURL = ?)") .bind(podcast_id) .bind(&episode.title) + .bind(&episode.url) .fetch_optional(pool) .await?; - row.is_some() + row.map(|r| r.try_get::("EpisodeID").ok()).flatten() } }; - - if exists { - continue; // Episode already exists, skip it + + if let Some(episode_id) = existing_episode_id { + // Episode already exists (by title or URL) - UPDATE it with new metadata + match self { + DatabasePool::Postgres(pool) => { + sqlx::query( + r#"UPDATE "Episodes" + SET episodetitle = $1, episodedescription = $2, episodeurl = $3, + episodeartwork = $4, episodepubdate = $5, episodeduration = $6 + WHERE episodeid = $7"# + ) + .bind(&episode.title) + .bind(&episode.description) + .bind(&episode.url) + .bind(&episode.artwork_url) + .bind(&episode.pub_date) + .bind(episode.duration) + .bind(episode_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query( + "UPDATE Episodes + SET EpisodeTitle = ?, EpisodeDescription = ?, EpisodeURL = ?, + EpisodeArtwork = ?, EpisodePubDate = ?, EpisodeDuration = ? + WHERE EpisodeID = ?" 
+ ) + .bind(&episode.title) + .bind(&episode.description) + .bind(&episode.url) + .bind(&episode.artwork_url) + .bind(&episode.pub_date) + .bind(episode.duration) + .bind(episode_id) + .execute(pool) + .await?; + } + } + // Skip to next episode - don't add to new_episodes list for updates + continue; + } + + // This is a NEW episode - estimate duration if missing + if episode.duration == 0 { + let audio_url = &episode.url; + if !audio_url.is_empty() { + if let Ok(handle) = tokio::runtime::Handle::try_current() { + if let Some(estimated_duration) = tokio::task::block_in_place(|| { + handle.block_on(self.estimate_duration_from_audio_url_async(audio_url)) + }) { + episode.duration = estimated_duration; + println!("Estimated duration {} seconds for new episode: {}", estimated_duration, episode.title); + } + } + } } // Insert new episode @@ -4796,16 +5439,72 @@ impl DatabasePool { username: Option<&str>, password: Option<&str>, ) -> AppResult { - let client = reqwest::Client::new(); - let mut request = client.get(url).header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"); + println!("try_fetch_feed called with URL: {}", url); + if let (Some(user), Some(pass)) = (username, password) { + println!("Using basic authentication for feed: {}", url); + } else { + println!("No authentication for feed: {}", url); + } + + // Build HTTP client with proper configuration for container environment + let client = reqwest::Client::builder() + .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| { + println!("Failed to build HTTP client: {}", e); + AppError::Http(e) + })?; + + let mut request = client.get(url); if let (Some(user), Some(pass)) = (username, password) { + println!("Adding basic auth to request for user: {}", user); request = request.basic_auth(user, 
Some(pass)); } - let response = request.send().await.map_err(|e| AppError::Http(e))?; + println!("Sending HTTP request to: {}", url); + let response = request.send().await.map_err(|e| { + println!("HTTP request failed for {}: {}", url, e); + AppError::Http(e) + })?; if !response.status().is_success() { + // If we get a 403, the server might be blocking browser User-Agents + // Try with a podcast client User-Agent first + if response.status() == 403 { + println!("Got 403 Forbidden, trying with podcast client User-Agent"); + + let podcast_client = reqwest::Client::builder() + .user_agent("PinePods/1.0") + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| { + println!("Failed to build podcast client: {}", e); + AppError::Http(e) + })?; + + let mut podcast_request = podcast_client.get(url); + + if let (Some(user), Some(pass)) = (username, password) { + println!("Adding basic auth to podcast client request for user: {}", user); + podcast_request = podcast_request.basic_auth(user, Some(pass)); + } + + let podcast_response = podcast_request.send().await.map_err(|e| { + println!("Podcast client request failed for {}: {}", url, e); + AppError::Http(e) + })?; + + if podcast_response.status().is_success() { + println!("Podcast client request succeeded with status: {}", podcast_response.status()); + return Ok(podcast_response.text().await.map_err(|e| AppError::Http(e))?); + } + + println!("Podcast client request also failed with status: {}", podcast_response.status()); + } + + println!("Initial request failed with status: {}, trying alternate URL", response.status()); // Try alternate URL (www vs non-www) let alternate_url = if url.contains("://www.") { url.replace("://www.", "://") @@ -4813,21 +5512,29 @@ impl DatabasePool { url.replace("://", "://www.") }; - let mut alt_request = client.get(&alternate_url).header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"); + 
println!("Trying alternate URL: {}", alternate_url); + let mut alt_request = client.get(&alternate_url); if let (Some(user), Some(pass)) = (username, password) { + println!("Adding basic auth to alternate request for user: {}", user); alt_request = alt_request.basic_auth(user, Some(pass)); } - let alt_response = alt_request.send().await.map_err(|e| AppError::Http(e))?; + let alt_response = alt_request.send().await.map_err(|e| { + println!("Alternate HTTP request failed for {}: {}", alternate_url, e); + AppError::Http(e) + })?; if !alt_response.status().is_success() { - return Err(AppError::bad_request("Invalid username or password")); + println!("Alternate request also failed with status: {}", alt_response.status()); + return Err(AppError::bad_request(&format!("Feed request failed: HTTP {}", alt_response.status()))); } + println!("Alternate request succeeded with status: {}", alt_response.status()); return Ok(alt_response.text().await.map_err(|e| AppError::Http(e))?); } + println!("Request succeeded with status: {}", response.status()); Ok(response.text().await.map_err(|e| AppError::Http(e))?) 
} @@ -4851,8 +5558,14 @@ impl DatabasePool { // Skip empty values if !title_str.is_empty() && !duration_str.is_empty() { - println!("🕐 Raw iTunes duration extracted: title='{}' duration='{}'", title_str, duration_str); - raw_durations.insert(title_str.to_string(), duration_str.to_string()); + // Decode HTML entities in title to match feed-rs parsed titles + let decoded_title = title_str + .replace("'", "'") + .replace(""", "\"") + .replace("&", "&") + .replace("<", "<") + .replace(">", ">"); + raw_durations.insert(decoded_title, duration_str.to_string()); } } } @@ -4868,14 +5581,21 @@ impl DatabasePool { let duration_str = duration_match.as_str().trim(); // Only add if not already found and both values are non-empty - if !title_str.is_empty() && !duration_str.is_empty() && !raw_durations.contains_key(title_str) { - println!("🕐 Raw iTunes duration extracted (reverse): title='{}' duration='{}'", title_str, duration_str); - raw_durations.insert(title_str.to_string(), duration_str.to_string()); + if !title_str.is_empty() && !duration_str.is_empty() { + // Decode HTML entities in title to match feed-rs parsed titles + let decoded_title = title_str + .replace("'", "'") + .replace(""", "\"") + .replace("&", "&") + .replace("<", "<") + .replace(">", ">"); + if !raw_durations.contains_key(&decoded_title) { + raw_durations.insert(decoded_title, duration_str.to_string()); + } } } } - println!("🕐 Total raw iTunes durations extracted: {}", raw_durations.len()); raw_durations } @@ -4883,10 +5603,20 @@ impl DatabasePool { async fn parse_rss_feed( &self, content: &str, - podcast_id: i32, + _podcast_id: i32, artwork_url: &str, ) -> AppResult> { - use chrono::{DateTime, Utc}; + self.parse_rss_feed_with_options(content, _podcast_id, artwork_url, false).await + } + + async fn parse_rss_feed_with_options( + &self, + content: &str, + _podcast_id: i32, + artwork_url: &str, + estimate_missing_durations: bool, + ) -> AppResult> { + use chrono::Utc; use feed_rs::parser; use 
std::collections::HashMap; @@ -4898,22 +5628,7 @@ impl DatabasePool { let mut episodes = Vec::new(); - println!("🔍 Feed parsed successfully, found {} entries", feed.entries.len()); - for (i, entry) in feed.entries.iter().enumerate() { - println!("🔍 Entry {}: title={:?}", i, entry.title.as_ref().map(|t| &t.content)); - println!("🔍 Entry {}: links count={}", i, entry.links.len()); - for (j, link) in entry.links.iter().enumerate() { - println!("🔍 Entry {} Link {}: href={}, media_type={:?}, rel={:?}", i, j, link.href, link.media_type, link.rel); - } - println!("🔍 Entry {}: media count={}", i, entry.media.len()); - for (j, media) in entry.media.iter().enumerate() { - println!("🔍 Entry {} Media {}: content count={}", i, j, media.content.len()); - for (k, content) in media.content.iter().enumerate() { - println!("🔍 Entry {} Media {} Content {}: url={:?}, content_type={:?}", i, j, k, content.url, content.content_type); - } - } - } for entry in feed.entries { // EXACT Python replication: if not all(hasattr(entry, attr) for attr in ["title", "summary", "enclosures"]): continue @@ -4963,15 +5678,19 @@ impl DatabasePool { // Also check for RSS tags that might not be in links // feed_rs should put enclosures in the links, but as fallback check media - // Published date + // Published date - store under both keys for compatibility if let Some(published) = &entry.published { - episode_data.insert("published".to_string(), published.to_rfc3339()); + let date_str = published.to_rfc3339(); + // println!("📅 DEBUG: feed-rs extracted published date: {}", date_str); + episode_data.insert("published".to_string(), date_str.clone()); + episode_data.insert("pubDate".to_string(), date_str); + } else { + // println!("⚠️ DEBUG: No published date found in feed-rs entry for episode: {:?}", entry.title); } // Media extensions for media in &entry.media { if let Some(duration) = &media.duration { - println!("🕐 Found media duration: {} seconds", duration.as_secs()); 
episode_data.insert("duration".to_string(), duration.as_secs().to_string()); // Don't use feed_rs processed duration for iTunes - we'll use raw values } @@ -4979,7 +5698,6 @@ impl DatabasePool { // Check if we have a raw iTunes duration for this episode title if let Some(title) = &entry.title { if let Some(raw_duration) = raw_durations.get(&title.content) { - println!("🕐 Using raw iTunes duration: '{}' for episode '{}'", raw_duration, title.content); episode_data.insert("itunes:duration".to_string(), raw_duration.clone()); } } @@ -5009,13 +5727,9 @@ impl DatabasePool { } // Debug what we're passing to duration parsing - println!("🕐 Episode '{}' duration data: itunes:duration={:?}, duration={:?}", - episode_data.get("title").unwrap_or(&"NO TITLE".to_string()), - episode_data.get("itunes:duration"), - episode_data.get("duration")); // Apply all the Python-style parsing logic with ALL fallbacks - self.apply_python_style_parsing(&mut episode, &episode_data, artwork_url); + self.apply_python_style_parsing(&mut episode, &episode_data, artwork_url, estimate_missing_durations); if !episode.title.is_empty() { episodes.push(episode); @@ -5026,14 +5740,13 @@ impl DatabasePool { } // Apply Python-style parsing logic with all fallbacks - fn apply_python_style_parsing(&self, episode: &mut EpisodeData, data: &HashMap, default_artwork: &str) { + fn apply_python_style_parsing(&self, episode: &mut EpisodeData, data: &HashMap, default_artwork: &str, estimate_missing_durations: bool) { // Title - REQUIRED field with robust cleaning if let Some(title) = data.get("title") { episode.title = self.clean_and_normalize_title(title); } // Skip episodes without titles - this is critical like Python version if episode.title.is_empty() { - println!("⚠️ Skipping episode with no title"); return; } @@ -5044,10 +5757,6 @@ impl DatabasePool { episode.url = self.parse_audio_url_comprehensive(data); // Debug logging for episode URL extraction - println!("🎵 Episode URL extraction: title='{}', 
enclosure_url={:?}, final_url='{}'", - episode.title, - data.get("enclosure_url"), - episode.url); // Artwork with comprehensive fallbacks and validation like Python episode.artwork_url = self.parse_artwork_comprehensive(data, default_artwork); @@ -5056,8 +5765,7 @@ impl DatabasePool { episode.pub_date = self.parse_publication_date_comprehensive(data); // Duration parsing with extensive fallbacks like Python - episode.duration = self.parse_duration_comprehensive(data); - println!("🕐 Final parsed duration for '{}': {} seconds", episode.title, episode.duration); + episode.duration = self.parse_duration_comprehensive(data, estimate_missing_durations); } // Clean and normalize titles like Python version @@ -5125,18 +5833,18 @@ impl DatabasePool { for candidate in artwork_candidates.iter().flatten() { if !candidate.trim().is_empty() && self.is_valid_image_url(candidate) { let cleaned_url = self.validate_and_clean_url(candidate); - println!("🎨 Using artwork: {}", cleaned_url); return cleaned_url; } } // Use default podcast artwork as fallback - println!("🎨 Using default podcast artwork: {}", default_artwork); default_artwork.to_string() } // Comprehensive publication date parsing fn parse_publication_date_comprehensive(&self, data: &HashMap) -> DateTime { + // println!("🔍 DEBUG: Date parsing - episode_data keys: {:?}", data.keys().collect::>()); + // Multiple date field sources let date_candidates = [ data.get("pubDate"), @@ -5147,35 +5855,51 @@ impl DatabasePool { data.get("date"), ]; - for date_str in date_candidates.iter().flatten() { - if let Some(parsed_date) = self.try_parse_date(date_str) { - // Validate date is reasonable (not too far in future, not before 1990) - let now = Utc::now(); - let year_1990 = DateTime::parse_from_rfc3339("1990-01-01T00:00:00Z").unwrap().with_timezone(&Utc); - let one_year_future = now + chrono::Duration::days(365); - - if parsed_date >= year_1990 && parsed_date <= one_year_future { - return parsed_date; + for (i, date_str) in 
date_candidates.iter().enumerate() { + if let Some(date_str) = date_str { + // println!("📅 DEBUG: Trying date candidate {}: '{}'", i, date_str); + if let Some(parsed_date) = self.try_parse_date(date_str) { + // Validate date is reasonable (not too far in future, not before 1990) + let now = Utc::now(); + let year_1990 = DateTime::parse_from_rfc3339("1990-01-01T00:00:00Z").unwrap().with_timezone(&Utc); + let one_year_future = now + chrono::Duration::days(365); + + if parsed_date >= year_1990 && parsed_date <= one_year_future { + // println!("✅ DEBUG: Successfully parsed date: {}", parsed_date); + return parsed_date; + } else { + // println!("❌ DEBUG: Date {} outside valid range", parsed_date); + } + } else { + // println!("❌ DEBUG: Failed to parse date string: '{}'", date_str); } } } // Fallback to current time like Python version + // println!("⚠️ DEBUG: No valid date found, falling back to current time"); Utc::now() } // Try to parse a date string with multiple formats fn try_parse_date(&self, date_str: &str) -> Option> { let date_str = date_str.trim(); + // println!("🔧 DEBUG: Attempting to parse date: '{}'", date_str); // RFC 2822 format (most common in RSS) if let Ok(parsed) = DateTime::parse_from_rfc2822(date_str) { + // println!("✅ DEBUG: Parsed as RFC 2822: {}", parsed); return Some(parsed.with_timezone(&Utc)); + } else { + // println!("❌ DEBUG: Failed to parse as RFC 2822"); } // RFC 3339/ISO 8601 format if let Ok(parsed) = DateTime::parse_from_rfc3339(date_str) { + // println!("✅ DEBUG: Parsed as RFC 3339: {}", parsed); return Some(parsed.with_timezone(&Utc)); + } else { + // println!("❌ DEBUG: Failed to parse as RFC 3339"); } // Common custom formats found in real feeds @@ -5419,7 +6143,7 @@ impl DatabasePool { } // Comprehensive duration parsing matching Python logic - fn parse_duration_comprehensive(&self, data: &HashMap) -> i32 { + fn parse_duration_comprehensive(&self, data: &HashMap, estimate_missing_durations: bool) -> i32 { // Priority order like 
Python version let duration_candidates = [ data.get("itunes:duration"), @@ -5446,7 +6170,21 @@ impl DatabasePool { } } - // Default duration + // Only estimate duration from HTTP if we're adding a new episode AND the flag is enabled + if estimate_missing_durations { + if let Some(audio_url) = data.get("enclosure_url") { + // Check if we're in an async context and can make the HTTP request + if let Ok(handle) = tokio::runtime::Handle::try_current() { + if let Some(estimated_duration) = tokio::task::block_in_place(|| { + handle.block_on(self.estimate_duration_from_audio_url_async(audio_url)) + }) { + return estimated_duration; + } + } + } + } + + // Default to 0 if no duration can be determined 0 } @@ -5550,6 +6288,52 @@ impl DatabasePool { let bytes_per_second = (bitrate_kbps * 1000) / 8; (file_size_bytes / bytes_per_second) as i32 } + + // NEW: Estimate duration by fetching HTTP HEAD request to get Content-Length + // This is a fallback for when RSS feeds don't include duration or file size + async fn estimate_duration_from_audio_url_async(&self, audio_url: &str) -> Option { + println!("Attempting to estimate duration from audio URL: {}", audio_url); + + // Build HTTP client with timeout to avoid hanging + let client = match reqwest::Client::builder() + .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + .timeout(std::time::Duration::from_secs(10)) // Short timeout + .build() + { + Ok(client) => client, + Err(e) => { + println!("Failed to build HTTP client for duration estimation: {}", e); + return None; + } + }; + + // Make HEAD request to get Content-Length without downloading the file + match client.head(audio_url).send().await { + Ok(response) => { + if response.status().is_success() { + if let Some(content_length) = response.headers().get("content-length") { + if let Ok(content_length_str) = content_length.to_str() { + if let Ok(file_size) = content_length_str.parse::() { + if 
file_size > 1_000_000 { // > 1MB, reasonable audio file + let estimated_duration = self.estimate_duration_from_file_size(file_size); + println!("Estimated duration from file size {}: {} seconds", file_size, estimated_duration); + return Some(estimated_duration); + } + } + } + } + println!("No Content-Length header found in response"); + } else { + println!("HEAD request failed with status: {}", response.status()); + } + } + Err(e) => { + println!("HTTP HEAD request failed for {}: {}", audio_url, e); + } + } + + None + } // Get first episode ID - matches Python get_first_episode_id function @@ -5761,7 +6545,7 @@ impl DatabasePool { .await?; if let Some(row) = row { - Ok(Some(row.try_get("podcastid")?)) + Ok(Some(row.try_get("PodcastID")?)) } else { Ok(None) } @@ -5783,7 +6567,11 @@ impl DatabasePool { "Episodes".episodetitle as episodetitle, "Episodes".episodepubdate as episodepubdate, "Episodes".episodedescription as episodedescription, - "Episodes".episodeartwork as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, "Episodes".episodeurl as episodeurl, "Episodes".episodeduration as episodeduration, "Podcasts".podcastindexid as podcastindexid, @@ -5798,6 +6586,7 @@ impl DatabasePool { FROM "DownloadedEpisodes" INNER JOIN "Episodes" ON "DownloadedEpisodes".episodeid = "Episodes".episodeid INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "DownloadedEpisodes".episodeid = "UserEpisodeHistory".episodeid AND "DownloadedEpisodes".userid = "UserEpisodeHistory".userid @@ -5820,7 +6609,11 @@ impl DatabasePool { "YouTubeVideos".videotitle as episodetitle, "YouTubeVideos".publishedat as episodepubdate, "YouTubeVideos".videodescription as episodedescription, 
- "YouTubeVideos".thumbnailurl as episodeartwork, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as episodeartwork, "YouTubeVideos".videourl as episodeurl, "YouTubeVideos".duration as episodeduration, "Podcasts".podcastindexid as podcastindexid, @@ -5835,6 +6628,7 @@ impl DatabasePool { FROM "DownloadedVideos" INNER JOIN "YouTubeVideos" ON "DownloadedVideos".videoid = "YouTubeVideos".videoid INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "SavedVideos" ON "DownloadedVideos".videoid = "SavedVideos".videoid AND "SavedVideos".userid = $4 @@ -5895,7 +6689,11 @@ impl DatabasePool { Episodes.EpisodeTitle as episodetitle, Episodes.EpisodePubDate as episodepubdate, Episodes.EpisodeDescription as episodedescription, - Episodes.EpisodeArtwork as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as episodeartwork, Episodes.EpisodeURL as episodeurl, Episodes.EpisodeDuration as episodeduration, Podcasts.PodcastIndexID as podcastindexid, @@ -5910,6 +6708,7 @@ impl DatabasePool { FROM DownloadedEpisodes INNER JOIN Episodes ON DownloadedEpisodes.EpisodeID = Episodes.EpisodeID INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON DownloadedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID AND DownloadedEpisodes.UserID = UserEpisodeHistory.UserID @@ -5932,7 +6731,11 @@ impl DatabasePool { YouTubeVideos.VideoTitle as episodetitle, YouTubeVideos.PublishedAt as episodepubdate, YouTubeVideos.VideoDescription as episodedescription, - 
YouTubeVideos.ThumbnailURL as episodeartwork, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = 1 AND Podcasts.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = 1 THEN Podcasts.ArtworkURL + ELSE YouTubeVideos.ThumbnailURL + END as episodeartwork, YouTubeVideos.VideoURL as episodeurl, YouTubeVideos.Duration as episodeduration, Podcasts.PodcastIndexID as podcastindexid, @@ -5947,6 +6750,7 @@ impl DatabasePool { FROM DownloadedVideos INNER JOIN YouTubeVideos ON DownloadedVideos.VideoID = YouTubeVideos.VideoID INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN SavedVideos ON DownloadedVideos.VideoID = SavedVideos.VideoID AND SavedVideos.UserID = ? @@ -6167,11 +6971,17 @@ impl DatabasePool { r#"SELECT "Podcasts".podcastid, "Podcasts".podcastname, "Episodes".episodeid, "Episodes".episodetitle, "Episodes".episodepubdate, "Episodes".episodedescription, - "Episodes".episodeartwork, "Episodes".episodeurl, "Episodes".episodeduration, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, + "Episodes".episodeurl, "Episodes".episodeduration, "Episodes".completed, "UserEpisodeHistory".listenduration, CAST("Episodes".episodeid AS VARCHAR) AS guid FROM "Episodes" INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid AND "UserEpisodeHistory".userid = $1 WHERE "Podcasts".podcastid = $2 AND "Podcasts".userid = $3 ORDER BY "Episodes".episodepubdate DESC"# @@ -6211,11 +7021,17 @@ impl DatabasePool { "SELECT Podcasts.PodcastID, Podcasts.PodcastName, Episodes.EpisodeID, Episodes.EpisodeTitle, Episodes.EpisodePubDate, 
Episodes.EpisodeDescription, - Episodes.EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = TRUE AND Podcasts.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as EpisodeArtwork, + Episodes.EpisodeURL, Episodes.EpisodeDuration, Episodes.Completed, UserEpisodeHistory.ListenDuration, CAST(Episodes.EpisodeID AS CHAR) AS guid FROM Episodes INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? WHERE Podcasts.PodcastID = ? AND Podcasts.UserID = ? ORDER BY Episodes.EpisodePubDate DESC" @@ -6232,7 +7048,7 @@ impl DatabasePool { podcastname: row.try_get("PodcastName")?, episodetitle: row.try_get("EpisodeTitle")?, episodepubdate: { - let datetime = row.try_get::, _>("EpisodePubDate")?; + let datetime = row.try_get::, _>("EpisodePubDate")?; datetime.format("%Y-%m-%dT%H:%M:%S").to_string() }, episodedescription: row.try_get("EpisodeDescription")?, @@ -6241,7 +7057,7 @@ impl DatabasePool { episodeduration: row.try_get("EpisodeDuration")?, listenduration: row.try_get("ListenDuration").ok(), episodeid: row.try_get("EpisodeID")?, - completed: row.try_get("Completed")?, + completed: row.try_get::("Completed")? 
!= 0, saved: false, // Not included in this query queued: false, // Not included in this query downloaded: false, // Not included in this query @@ -6376,7 +7192,7 @@ impl DatabasePool { "podcastindexid": row.try_get::, _>("podcastindexid")?, "feedurl": row.try_get::("feedurl").unwrap_or_default(), "podcastname": row.try_get::("podcastname")?, - "artworkurl": row.try_get::("artworkurl")?, + "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default(), "episodetitle": row.try_get::("episodetitle")?, "episodepubdate": episode_pubdate, "episodedescription": row.try_get::("episodedescription")?, @@ -6431,10 +7247,14 @@ impl DatabasePool { let episode_pubdate = naive_date.format("%Y-%m-%dT%H:%M:%S").to_string(); let real_episode_id: i32 = row.try_get("real_episode_id")?; - // Get listen history using the real episode ID + // Get listen history using the real episode ID - matches Python implementation let listen_history = sqlx::query( - r#"SELECT listenduration, completed FROM "UserEpisodeHistory" - WHERE episodeid = $1 AND userid = $2"# + r#"SELECT "UserEpisodeHistory".listenduration, "Episodes".completed + FROM "Episodes" + LEFT JOIN "UserEpisodeHistory" ON + "Episodes".episodeid = "UserEpisodeHistory".episodeid + AND "UserEpisodeHistory".userid = $2 + WHERE "Episodes".episodeid = $1"# ) .bind(real_episode_id) .bind(user_id) @@ -6455,7 +7275,7 @@ impl DatabasePool { "podcastindexid": row.try_get::, _>("podcastindexid")?, "feedurl": row.try_get::("feedurl").unwrap_or_default(), "podcastname": row.try_get::("podcastname")?, - "artworkurl": row.try_get::("artworkurl")?, + "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default(), "episodetitle": row.try_get::("episodetitle")?, "episodepubdate": episode_pubdate, "episodedescription": row.try_get::("episodedescription")?, @@ -6512,7 +7332,7 @@ impl DatabasePool { "podcastindexid": row.try_get::, _>("podcastindexid")?, "feedurl": 
row.try_get::("feedurl").unwrap_or_default(), "podcastname": row.try_get::("podcastname")?, - "artworkurl": row.try_get::("artworkurl")?, + "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default(), "episodetitle": row.try_get::("episodetitle")?, "episodepubdate": episode_pubdate, "episodedescription": row.try_get::("episodedescription")?, @@ -6534,21 +7354,35 @@ impl DatabasePool { DatabasePool::MySQL(pool) => { let row = sqlx::query( r#"SELECT - Episodes.EpisodeTitle as title, - Episodes.EpisodeDescription as description, - Episodes.EpisodeURL as episode_url, - Episodes.EpisodeArtwork as artwork_url, - Episodes.EpisodeDuration as duration, - Episodes.EpisodePubDate as pub_date, - Podcasts.PodcastName as podcast_name, - Podcasts.ArtworkURL as podcast_artwork, - UserEpisodeHistory.ListenDuration as listen_duration, - UserEpisodeHistory.Completed as completed - FROM Episodes - INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID - LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? - WHERE Episodes.EpisodeID = ? AND Podcasts.UserID = ?"# - ) + Podcasts.PodcastID, + Podcasts.PodcastIndexID, + Podcasts.FeedURL, + Podcasts.PodcastName, + Podcasts.ArtworkURL, + Episodes.EpisodeTitle, + Episodes.EpisodePubDate, + Episodes.EpisodeDescription, + Episodes.EpisodeArtwork, + Episodes.EpisodeURL, + Episodes.EpisodeDuration, + Episodes.EpisodeID, + UserEpisodeHistory.ListenDuration, + Episodes.Completed, + CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued, + CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved, + CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded, + 0 as is_youtube + FROM Episodes + INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = ? 
+ LEFT JOIN EpisodeQueue q ON Episodes.EpisodeID = q.EpisodeID AND q.UserID = ? + LEFT JOIN SavedEpisodes s ON Episodes.EpisodeID = s.EpisodeID AND s.UserID = ? + LEFT JOIN DownloadedEpisodes d ON Episodes.EpisodeID = d.EpisodeID AND d.UserID = ? + WHERE Episodes.EpisodeID = ? AND Podcasts.UserID = ?"# + ) + .bind(user_id) + .bind(user_id) + .bind(user_id) .bind(user_id) .bind(episode_id) .bind(user_id) @@ -6556,20 +7390,27 @@ impl DatabasePool { .await?; if let Some(row) = row { - let naive_date = row.try_get::("pub_date")?; - let pub_date = chrono::DateTime::::from_naive_utc_and_offset(naive_date, chrono::Utc); + let datetime = row.try_get::, _>("EpisodePubDate")?; Ok(serde_json::json!({ - "title": row.try_get::("title")?, - "description": row.try_get::("description")?, - "episode_url": row.try_get::("episode_url")?, - "artwork_url": row.try_get::("artwork_url")?, - "duration": row.try_get::("duration")?, - "pub_date": pub_date.to_rfc3339(), - "podcast_name": row.try_get::("podcast_name")?, - "podcast_artwork": row.try_get::("podcast_artwork")?, - "listen_duration": row.try_get::, _>("listen_duration")?, - "completed": row.try_get::, _>("completed")?.unwrap_or(false) + "podcastid": row.try_get::("PodcastID")?, + "podcastindexid": row.try_get::, _>("PodcastIndexID")?, + "feedurl": row.try_get::("FeedURL").unwrap_or_default(), + "podcastname": row.try_get::("PodcastName")?, + "artworkurl": row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_default(), + "episodetitle": row.try_get::("EpisodeTitle")?, + "episodepubdate": datetime.to_rfc3339(), + "episodedescription": row.try_get::("EpisodeDescription")?, + "episodeartwork": row.try_get::("EpisodeArtwork")?, + "episodeurl": row.try_get::("EpisodeURL")?, + "episodeduration": row.try_get::("EpisodeDuration")?, + "episodeid": row.try_get::("EpisodeID")?, + "listenduration": row.try_get::, _>("ListenDuration")?, + "completed": row.try_get::("Completed")? != 0, + "is_queued": row.try_get::("is_queued")? 
!= 0, + "is_saved": row.try_get::("is_saved")? != 0, + "is_downloaded": row.try_get::("is_downloaded")? != 0, + "is_youtube": row.try_get::("is_youtube")? != 0, })) } else { Err(AppError::not_found("Episode not found")) @@ -7342,10 +8183,10 @@ impl DatabasePool { Ok(serde_json::Value::Array(value_blocks)) } - // Check if API key is web key - matches Python is_web_key check - pub async fn is_web_key(&self, api_key: &str) -> AppResult { - // This would need to be implemented based on your web key configuration - // For now, return false - implement according to your Python logic + // Legacy web key check - no longer used but kept for compatibility + // All authentication now uses User ID 1 (background_tasks) for system operations + pub async fn is_web_key(&self, _api_key: &str) -> AppResult { + // Always return false - web keys are deprecated in favor of background_tasks user (ID 1) Ok(false) } @@ -7702,7 +8543,7 @@ impl DatabasePool { } // Get play episode details - matches Python get_play_episode_details function exactly - pub async fn get_play_episode_details(&self, user_id: i32, podcast_id: i32, is_youtube: bool) -> AppResult<(f64, i32, i32)> { + pub async fn get_play_episode_details(&self, user_id: i32, podcast_id: i32, _is_youtube: bool) -> AppResult<(f64, i32, i32)> { match self { DatabasePool::Postgres(pool) => { // First get user's default playback speed @@ -7761,7 +8602,11 @@ impl DatabasePool { .await?; let user_playback_speed = if let Some(row) = user_row { - row.try_get::, _>("PlaybackSpeed")?.unwrap_or(1.0) + if let Ok(speed) = row.try_get::, _>("PlaybackSpeed") { + speed.map(|s| s.to_f64().unwrap_or(1.0)).unwrap_or(1.0) + } else { + 1.0 + } } else { 1.0 }; @@ -7779,7 +8624,11 @@ impl DatabasePool { if let Some(row) = podcast_row { let playback_speed_customized: Option = row.try_get("PlaybackSpeedCustomized")?; - let podcast_playback_speed: Option = row.try_get("PlaybackSpeed")?; + let podcast_playback_speed: Option = if let Ok(speed) = row.try_get::, 
_>("PlaybackSpeed") { + speed.map(|s| s.to_f64().unwrap_or(1.0)) + } else { + None + }; let start_skip: Option = row.try_get("StartSkip")?; let end_skip: Option = row.try_get("EndSkip")?; @@ -7808,7 +8657,11 @@ impl DatabasePool { "Episodes".episodetitle as "Episodetitle", "Episodes".episodepubdate as "Episodepubdate", "Episodes".episodedescription as "Episodedescription", - "Episodes".episodeartwork as "Episodeartwork", + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as "Episodeartwork", "Episodes".episodeurl as "Episodeurl", "Episodes".episodeduration as "Episodeduration", "UserEpisodeHistory".listenduration as "Listenduration", @@ -7820,6 +8673,7 @@ impl DatabasePool { FALSE as is_youtube FROM "Episodes" INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid AND "UserEpisodeHistory".userid = $1 @@ -7833,7 +8687,14 @@ impl DatabasePool { LEFT JOIN "DownloadedEpisodes" ON "Episodes".episodeid = "DownloadedEpisodes".episodeid AND "DownloadedEpisodes".userid = $1 - WHERE "Podcasts".userid = $1 AND "Podcasts".podcastid = $2 + WHERE "Podcasts".userid = $1 AND ( + "Podcasts".podcastid = $2 OR + "Podcasts".podcastid IN ( + SELECT jsonb_array_elements_text(COALESCE(mergedpodcastids::jsonb, '[]'::jsonb))::int + FROM "Podcasts" p2 + WHERE p2.podcastid = $2 AND p2.userid = $1 + ) + ) UNION ALL @@ -7842,7 +8703,11 @@ impl DatabasePool { "YouTubeVideos".videotitle as "Episodetitle", "YouTubeVideos".publishedat as "Episodepubdate", "YouTubeVideos".videodescription as "Episodedescription", - "YouTubeVideos".thumbnailurl as "Episodeartwork", + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN 
"Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as "Episodeartwork", "YouTubeVideos".videourl as "Episodeurl", "YouTubeVideos".duration as "Episodeduration", "YouTubeVideos".listenposition as "Listenduration", @@ -7854,6 +8719,7 @@ impl DatabasePool { TRUE as is_youtube FROM "YouTubeVideos" INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid LEFT JOIN "SavedVideos" ON "YouTubeVideos".videoid = "SavedVideos".videoid AND "SavedVideos".userid = $1 @@ -7864,7 +8730,14 @@ impl DatabasePool { LEFT JOIN "DownloadedVideos" ON "YouTubeVideos".videoid = "DownloadedVideos".videoid AND "DownloadedVideos".userid = $1 - WHERE "Podcasts".userid = $1 AND "Podcasts".podcastid = $2 + WHERE "Podcasts".userid = $1 AND ( + "Podcasts".podcastid = $2 OR + "Podcasts".podcastid IN ( + SELECT jsonb_array_elements_text(COALESCE(mergedpodcastids::jsonb, '[]'::jsonb))::int + FROM "Podcasts" p2 + WHERE p2.podcastid = $2 AND p2.userid = $1 + ) + ) ) combined ORDER BY "Episodepubdate" DESC"# ) @@ -7899,8 +8772,141 @@ impl DatabasePool { Ok(episodes) } DatabasePool::MySQL(pool) => { - // MySQL version would go here - similar structure but without quoted table names - Ok(vec![]) + let rows = sqlx::query( + "SELECT * FROM ( + SELECT + Podcasts.PodcastName as podcastname, + Episodes.EpisodeTitle as Episodetitle, + Episodes.EpisodePubDate as Episodepubdate, + Episodes.EpisodeDescription as Episodedescription, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = TRUE AND Podcasts.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + ELSE Episodes.EpisodeArtwork + END as Episodeartwork, + Episodes.EpisodeURL as Episodeurl, + Episodes.EpisodeDuration as Episodeduration, + UserEpisodeHistory.ListenDuration as Listenduration, + Episodes.EpisodeID as Episodeid, + Episodes.Completed as 
Completed, + CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + FALSE as is_youtube + FROM Episodes + INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID + LEFT JOIN UserEpisodeHistory ON + Episodes.EpisodeID = UserEpisodeHistory.EpisodeID + AND UserEpisodeHistory.UserID = ? + LEFT JOIN SavedEpisodes ON + Episodes.EpisodeID = SavedEpisodes.EpisodeID + AND SavedEpisodes.UserID = ? + LEFT JOIN EpisodeQueue ON + Episodes.EpisodeID = EpisodeQueue.EpisodeID + AND EpisodeQueue.UserID = ? + AND EpisodeQueue.is_youtube = FALSE + LEFT JOIN DownloadedEpisodes ON + Episodes.EpisodeID = DownloadedEpisodes.EpisodeID + AND DownloadedEpisodes.UserID = ? + WHERE Podcasts.UserID = ? AND ( + Podcasts.PodcastID = ? OR + Podcasts.PodcastID IN ( + SELECT CAST(JSON_UNQUOTE(JSON_EXTRACT(COALESCE(p2.MergedPodcastIDs, '[]'), CONCAT('$[', numbers.n, ']'))) AS UNSIGNED) + FROM (SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) numbers + INNER JOIN Podcasts p2 ON p2.PodcastID = ? AND p2.UserID = ? 
+ WHERE JSON_UNQUOTE(JSON_EXTRACT(COALESCE(p2.MergedPodcastIDs, '[]'), CONCAT('$[', numbers.n, ']'))) IS NOT NULL + ) + ) + + UNION ALL + + SELECT + Podcasts.PodcastName as podcastname, + YouTubeVideos.VideoTitle as Episodetitle, + YouTubeVideos.PublishedAt as Episodepubdate, + YouTubeVideos.VideoDescription as Episodedescription, + CASE + WHEN Podcasts.UsePodcastCoversCustomized = TRUE AND Podcasts.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + WHEN Users.UsePodcastCovers = TRUE THEN Podcasts.ArtworkURL + ELSE YouTubeVideos.ThumbnailURL + END as Episodeartwork, + YouTubeVideos.VideoURL as Episodeurl, + YouTubeVideos.Duration as Episodeduration, + YouTubeVideos.ListenPosition as Listenduration, + YouTubeVideos.VideoID as Episodeid, + YouTubeVideos.Completed as Completed, + CASE WHEN SavedVideos.VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued, + CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + TRUE as is_youtube + FROM YouTubeVideos + INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + LEFT JOIN Users ON Podcasts.UserID = Users.UserID + LEFT JOIN SavedVideos ON + YouTubeVideos.VideoID = SavedVideos.VideoID + AND SavedVideos.UserID = ? + LEFT JOIN EpisodeQueue ON + YouTubeVideos.VideoID = EpisodeQueue.EpisodeID + AND EpisodeQueue.UserID = ? + AND EpisodeQueue.is_youtube = TRUE + LEFT JOIN DownloadedVideos ON + YouTubeVideos.VideoID = DownloadedVideos.VideoID + AND DownloadedVideos.UserID = ? + WHERE Podcasts.UserID = ? AND ( + Podcasts.PodcastID = ? 
OR + Podcasts.PodcastID IN ( + SELECT CAST(JSON_UNQUOTE(JSON_EXTRACT(COALESCE(p2.MergedPodcastIDs, '[]'), CONCAT('$[', numbers.n, ']'))) AS UNSIGNED) + FROM (SELECT 0 as n UNION SELECT 1 UNION SELECT 2 UNION SELECT 3 UNION SELECT 4 UNION SELECT 5 UNION SELECT 6 UNION SELECT 7 UNION SELECT 8 UNION SELECT 9) numbers + INNER JOIN Podcasts p2 ON p2.PodcastID = ? AND p2.UserID = ? + WHERE JSON_UNQUOTE(JSON_EXTRACT(COALESCE(p2.MergedPodcastIDs, '[]'), CONCAT('$[', numbers.n, ']'))) IS NOT NULL + ) + ) + ) combined + ORDER BY Episodepubdate DESC" + ) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(podcast_id) + .bind(podcast_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(podcast_id) + .bind(podcast_id) + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut episodes = Vec::new(); + for row in rows { + let datetime = row.try_get::, _>("Episodepubdate")?; + let episodepubdate = datetime.format("%Y-%m-%dT%H:%M:%S").to_string(); + + episodes.push(crate::handlers::podcasts::PodcastEpisode { + podcastname: row.try_get("podcastname")?, + episodetitle: row.try_get("Episodetitle")?, + episodepubdate, + episodedescription: row.try_get("Episodedescription")?, + episodeartwork: row.try_get("Episodeartwork")?, + episodeurl: row.try_get("Episodeurl")?, + episodeduration: row.try_get("Episodeduration")?, + listenduration: row.try_get("Listenduration").ok(), + episodeid: row.try_get("Episodeid")?, + completed: row.try_get::("Completed")? != 0, + saved: row.try_get::("saved")? != 0, + queued: row.try_get::("queued")? != 0, + downloaded: row.try_get::("downloaded")? != 0, + is_youtube: row.try_get::("is_youtube")? 
!= 0, + }); + } + + Ok(episodes) } } } @@ -8213,7 +9219,7 @@ impl DatabasePool { match self { DatabasePool::Postgres(pool) => { let row = sqlx::query( - r#"SELECT userid, fullname, username, email, CASE WHEN isadmin THEN 1 ELSE 0 END AS isadmin FROM "Users" WHERE userid = $1"# + r#"SELECT userid, fullname, username, email, CASE WHEN isadmin THEN 1 ELSE 0 END AS isadmin, timezone, timeformat, dateformat, language FROM "Users" WHERE userid = $1"# ) .bind(user_id) .fetch_optional(pool) @@ -8226,6 +9232,10 @@ impl DatabasePool { "username": row.try_get::("username")?, "email": row.try_get::("email")?, "isadmin": row.try_get::("isadmin")?, + "timezone": row.try_get::("timezone")?, + "timeformat": row.try_get::("timeformat")?, + "dateformat": row.try_get::("dateformat")?, + "language": row.try_get::("language")?, }))) } else { Ok(None) @@ -8233,7 +9243,7 @@ impl DatabasePool { } DatabasePool::MySQL(pool) => { let row = sqlx::query( - "SELECT UserID as userid, Fullname as fullname, Username as username, Email as email, IsAdmin as isadmin FROM Users WHERE UserID = ?" + "SELECT UserID as userid, Fullname as fullname, Username as username, Email as email, IsAdmin as isadmin, TimeZone as timezone, TimeFormat as timeformat, DateFormat as dateformat, Language as language FROM Users WHERE UserID = ?" 
) .bind(user_id) .fetch_optional(pool) @@ -8246,6 +9256,10 @@ impl DatabasePool { "username": row.try_get::("username")?, "email": row.try_get::("email")?, "isadmin": row.try_get::("isadmin")?, + "timezone": row.try_get::("timezone")?, + "timeformat": row.try_get::("timeformat")?, + "dateformat": row.try_get::("dateformat")?, + "language": row.try_get::("language")?, }))) } else { Ok(None) @@ -8277,6 +9291,12 @@ impl DatabasePool { .execute(pool) .await?; + // Create default playlists for the new user + if let Err(e) = self.create_default_playlists_for_user(user_id).await { + tracing::warn!("⚠️ Failed to create default playlists for new user {}: {}", user_id, e); + // Don't fail user creation if playlist creation fails + } + Ok(user_id) } DatabasePool::MySQL(pool) => { @@ -8299,6 +9319,12 @@ impl DatabasePool { .execute(pool) .await?; + // Create default playlists for the new user + if let Err(e) = self.create_default_playlists_for_user(user_id).await { + tracing::warn!("⚠️ Failed to create default playlists for new user {}: {}", user_id, e); + // Don't fail user creation if playlist creation fails + } + Ok(user_id) } } @@ -8386,6 +9412,12 @@ impl DatabasePool { .execute(pool) .await.ok(); + // CRITICAL: Delete user's playlists to avoid foreign key constraint violations + sqlx::query(r#"DELETE FROM "Playlists" WHERE userid = $1"#) + .bind(user_id) + .execute(pool) + .await.ok(); + // Delete from Users (main table) sqlx::query(r#"DELETE FROM "Users" WHERE userid = $1"#) .bind(user_id) @@ -8429,6 +9461,12 @@ impl DatabasePool { .execute(pool) .await.ok(); + // CRITICAL: Delete user's playlists to avoid foreign key constraint violations + sqlx::query("DELETE FROM Playlists WHERE UserID = ?") + .bind(user_id) + .execute(pool) + .await.ok(); + // Delete from Users (main table) sqlx::query("DELETE FROM Users WHERE UserID = ?") .bind(user_id) @@ -8925,10 +9963,10 @@ impl DatabasePool { abcdefghijklmnopqrstuvwxyz\ 0123456789"; let api_key: String = { - let mut rng = 
rand::thread_rng(); + let mut rng = rand::rng(); (0..64) .map(|_| { - let idx = rng.gen_range(0..charset.len()); + let idx = rng.random_range(0..charset.len()); charset[idx] as char }) .collect() @@ -8956,11 +9994,11 @@ impl DatabasePool { // Create RSS key - matches Python create_rss_key function exactly pub async fn create_rss_key(&self, user_id: i32, podcast_ids: Option>) -> AppResult { - use rand::{Rng, thread_rng}; + use rand::Rng; use rand::distr::Alphanumeric; // Generate 64-character RSS key - let rss_key: String = rand::thread_rng() + let rss_key: String = rand::rng() .sample_iter(&Alphanumeric) .take(64) .map(char::from) @@ -9122,7 +10160,7 @@ impl DatabasePool { let podcasts = match self { DatabasePool::Postgres(pool) => { let rows = sqlx::query( - r#"SELECT podcastname, feedurl FROM "Podcasts" WHERE userid = $1"# + r#"SELECT podcastname, feedurl FROM "Podcasts" WHERE userid = $1 AND (username IS NULL OR username = '') AND (password IS NULL OR password = '')"# ) .bind(user_id) .fetch_all(pool) @@ -9139,7 +10177,7 @@ impl DatabasePool { } DatabasePool::MySQL(pool) => { let rows = sqlx::query( - "SELECT PodcastName, FeedURL FROM Podcasts WHERE UserID = ?" + "SELECT PodcastName, FeedURL FROM Podcasts WHERE UserID = ? 
AND (Username IS NULL OR Username = '') AND (Password IS NULL OR Password = '')" ) .bind(user_id) .fetch_all(pool) @@ -9245,8 +10283,7 @@ pub struct PublicOidcProvider { // Nextcloud login data structure #[derive(Debug, Clone)] pub struct NextcloudLoginData { - pub login_url: String, - pub token: String, + pub raw_response: serde_json::Value, } // Sync result structure @@ -9366,6 +10403,8 @@ impl DatabasePool { .arg("--port").arg(&port) .arg("--user").arg(&username) .arg(format!("--password={}", password)) + .arg("--skip-ssl") + .arg("--default-auth=mysql_native_password") .arg(&database) .stdin(std::process::Stdio::piped()) .stdout(std::process::Stdio::piped()) @@ -9437,8 +10476,16 @@ impl DatabasePool { // Clear each table for table in tables { - let table_name: String = table.try_get("TABLE_NAME")?; - let query = format!("TRUNCATE TABLE `{}`", table_name); + // Handle both VARCHAR and VARBINARY cases for TABLE_NAME + let table_name: String = if let Ok(name) = table.try_get::("TABLE_NAME") { + name + } else if let Ok(name_bytes) = table.try_get::, _>("TABLE_NAME") { + String::from_utf8(name_bytes).map_err(|_| AppError::internal("Invalid table name encoding"))? 
+ } else { + return Err(AppError::internal("Could not retrieve table name")); + }; + // Use DELETE instead of TRUNCATE to avoid foreign key constraint issues + let query = format!("DELETE FROM `{}`", table_name); sqlx::query(&query).execute(pool).await?; } @@ -9529,14 +10576,13 @@ impl DatabasePool { // Generate MFA secret with QR code - matches Python generate_mfa_secret function exactly pub async fn generate_mfa_secret(&self, user_id: i32) -> AppResult<(String, String)> { use totp_rs::{Algorithm, Secret, TOTP}; - use qrcode::{QrCode, Color}; - use qrcode::render::svg; + use qrcode::QrCode; use rand::Rng; // Generate random base32 secret (matches Python random_base32()) let secret = { - let mut rng = rand::thread_rng(); - let secret_bytes: [u8; 20] = rng.gen(); // 160 bits = 32 base32 chars + let mut rng = rand::rng(); + let secret_bytes: [u8; 20] = rng.random(); // 160 bits = 32 base32 chars Secret::Raw(secret_bytes.to_vec()).to_encoded().to_string() }; @@ -9741,14 +10787,9 @@ impl DatabasePool { let json: serde_json::Value = response.json().await .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud response: {}", e)))?; - let poll_token = json["poll"]["token"].as_str() - .ok_or_else(|| AppError::internal("Missing poll token in Nextcloud response"))?; - let login_url = json["login"].as_str() - .ok_or_else(|| AppError::internal("Missing login URL in Nextcloud response"))?; - + // Return the raw JSON response from Nextcloud to match Python behavior Ok(NextcloudLoginData { - login_url: login_url.to_string(), - token: poll_token.to_string(), + raw_response: json, }) } @@ -9804,61 +10845,162 @@ impl DatabasePool { } } + // Perform initial full sync for Nextcloud to get ALL user subscriptions + if let Err(e) = self.call_nextcloud_initial_full_sync(user_id, nextcloud_url, login_name, app_password).await { + tracing::warn!("Initial Nextcloud full sync failed during setup: {}", e); + // Don't fail setup if initial sync fails + } + Ok(true) } - - // Verify 
gPodder authentication - matches Python verify_gpodder_auth function exactly - pub async fn verify_gpodder_auth(&self, gpodder_url: &str, username: &str, password: &str) -> AppResult { - let client = reqwest::Client::new(); - - // Test authentication with gPodder API - let auth_url = format!("{}/api/2/auth/{}/login.json", gpodder_url.trim_end_matches('/'), username); + + // Save Nextcloud credentials - helper method for background polling + pub async fn save_nextcloud_credentials(&self, user_id: i32, nextcloud_url: &str, app_password: &str, login_name: &str) -> AppResult<()> { + // Encrypt the app password + let encrypted_password = self.encrypt_password(app_password).await?; - let response = client - .post(&auth_url) - .basic_auth(username, Some(password)) - .send() - .await - .map_err(|e| AppError::internal(&format!("Failed to verify gPodder auth: {}", e)))?; + // Store Nextcloud credentials + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET gpodderurl = $1, gpodderloginname = $2, gpoddertoken = $3, pod_sync_type = 'nextcloud' WHERE userid = $4"#) + .bind(nextcloud_url) + .bind(login_name) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET GpodderUrl = ?, GpodderLoginName = ?, GpodderToken = ?, Pod_Sync_Type = 'nextcloud' WHERE UserID = ?") + .bind(nextcloud_url) + .bind(login_name) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } + } - Ok(response.status().is_success()) + Ok(()) } + // Add gPodder server - matches Python add_gpodder_server function exactly pub async fn add_gpodder_server(&self, user_id: i32, gpodder_url: &str, username: &str, password: &str) -> AppResult { // Encrypt the password let encrypted_password = self.encrypt_password(password).await?; + // Try to get devices from the external server to set a default device + let default_device_name = match 
self.get_first_device_from_server(gpodder_url, username, password).await { + Ok(device_name) => Some(device_name), + Err(e) => { + tracing::warn!("Could not get default device from external GPodder server: {}", e); + None + } + }; + // Store gPodder credentials match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "Users" SET gpodderurl = $1, gpodderloginname = $2, gpoddertoken = $3, pod_sync_type = 'external' WHERE userid = $4"#) - .bind(gpodder_url) - .bind(username) - .bind(&encrypted_password) - .bind(user_id) - .execute(pool) - .await?; + if let Some(device_name) = &default_device_name { + sqlx::query(r#"UPDATE "Users" SET gpodderurl = $1, gpodderloginname = $2, gpoddertoken = $3, pod_sync_type = 'external', defaultgpodderdevice = $5 WHERE userid = $4"#) + .bind(gpodder_url) + .bind(username) + .bind(&encrypted_password) + .bind(user_id) + .bind(device_name) + .execute(pool) + .await?; + } else { + sqlx::query(r#"UPDATE "Users" SET gpodderurl = $1, gpodderloginname = $2, gpoddertoken = $3, pod_sync_type = 'external' WHERE userid = $4"#) + .bind(gpodder_url) + .bind(username) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } } DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE Users SET GpodderUrl = ?, GpodderLoginName = ?, GpodderToken = ?, Pod_Sync_Type = 'external' WHERE UserID = ?") - .bind(gpodder_url) - .bind(username) - .bind(&encrypted_password) - .bind(user_id) - .execute(pool) - .await?; + if let Some(device_name) = &default_device_name { + sqlx::query("UPDATE Users SET GpodderUrl = ?, GpodderLoginName = ?, GpodderToken = ?, Pod_Sync_Type = 'external', DefaultGpodderDevice = ? 
WHERE UserID = ?") + .bind(gpodder_url) + .bind(username) + .bind(&encrypted_password) + .bind(device_name) + .bind(user_id) + .execute(pool) + .await?; + } else { + sqlx::query("UPDATE Users SET GpodderUrl = ?, GpodderLoginName = ?, GpodderToken = ?, Pod_Sync_Type = 'external' WHERE UserID = ?") + .bind(gpodder_url) + .bind(username) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } } } + // Spawn initial full sync as background task to avoid blocking the API response + if let Some(device_name) = default_device_name.clone() { + let pool_clone = self.clone(); + let gpodder_url_owned = gpodder_url.to_string(); + let username_owned = username.to_string(); + let password_owned = password.to_string(); + + tokio::spawn(async move { + if let Err(e) = pool_clone.call_gpodder_initial_full_sync(user_id, &gpodder_url_owned, &username_owned, &password_owned, &device_name).await { + tracing::warn!("Initial GPodder full sync failed during external server setup: {}", e); + } + }); + } + Ok(true) } + // Helper function to get first device from external GPodder server + async fn get_first_device_from_server(&self, gpodder_url: &str, username: &str, password: &str) -> AppResult { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + + let devices_url = format!("{}/api/2/devices/{}.json", gpodder_url.trim_end_matches('/'), username); + + let response = if session.authenticated { + session.client.get(&devices_url).send().await + } else { + session.client.get(&devices_url).basic_auth(username, Some(password)).send().await + }; + + match response { + Ok(resp) if resp.status().is_success() => { + let devices_data: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse devices: {}", e)))?; + + if let Some(devices_array) = devices_data.as_array() { + if let Some(first_device) = devices_array.first() { + if let Some(device_id) = first_device.get("id").and_then(|v| 
v.as_str()) { + return Ok(device_id.to_string()); + } + } + } + + Err(AppError::internal("No devices found on external GPodder server")) + } + Ok(resp) => { + Err(AppError::internal(&format!("Failed to get devices from external server: {}", resp.status()))) + } + Err(e) => { + Err(AppError::internal(&format!("Error connecting to external server: {}", e))) + } + } + } + // Encrypt password using Fernet - matches Python encryption - async fn encrypt_password(&self, password: &str) -> AppResult { + pub async fn encrypt_password(&self, password: &str) -> AppResult { use fernet::Fernet; - // Get encryption key from app settings + // Get encryption key from app settings (base64 string) let encryption_key = self.get_encryption_key().await?; let fernet = Fernet::new(&encryption_key) .ok_or_else(|| AppError::internal("Failed to create Fernet cipher"))?; @@ -9871,13 +11013,24 @@ impl DatabasePool { async fn get_encryption_key(&self) -> AppResult { match self { DatabasePool::Postgres(pool) => { + // Try as string first (new format), fallback to bytes (old format) let row = sqlx::query(r#"SELECT encryptionkey FROM "AppSettings" LIMIT 1"#) .fetch_optional(pool) .await?; if let Some(row) = row { - let key: Option = row.try_get("encryptionkey")?; - key.ok_or_else(|| AppError::internal("Encryption key not found")) + // Try string first + if let Ok(key_string) = row.try_get::("encryptionkey") { + return Ok(key_string); + } + // Fallback to bytes and convert to string + let key_bytes: Option> = row.try_get("encryptionkey")?; + if let Some(bytes) = key_bytes { + String::from_utf8(bytes) + .map_err(|e| AppError::internal(&format!("Invalid UTF-8 in encryption key: {}", e))) + } else { + Err(AppError::internal("Encryption key not found")) + } } else { Err(AppError::internal("App settings not found")) } @@ -9913,8 +11066,8 @@ impl DatabasePool { if url.is_some() && username.is_some() { Ok(Some(serde_json::json!({ - "gpodder_url": url.unwrap_or_default(), - "gpodder_username": 
username.unwrap_or_default(), + "gpodderurl": url.unwrap_or_default(), + "gpoddertoken": "", // Frontend expects this field but it's not used for display "sync_type": sync_type.unwrap_or_default() }))) } else { @@ -9937,8 +11090,8 @@ impl DatabasePool { if url.is_some() && username.is_some() { Ok(Some(serde_json::json!({ - "gpodder_url": url.unwrap_or_default(), - "gpodder_username": username.unwrap_or_default(), + "gpodderurl": url.unwrap_or_default(), + "gpoddertoken": "", // Frontend expects this field but it's not used for display "sync_type": sync_type.unwrap_or_default() }))) } else { @@ -10045,49 +11198,140 @@ impl DatabasePool { } } - // Get gPodder devices for user - matches Python get_devices function exactly with remote device support - pub async fn gpodder_get_user_devices(&self, user_id: i32) -> AppResult> { - // Get local devices - let mut local_devices = self.get_local_devices(user_id).await?; + // Set default gPodder device by name - for frontend compatibility + pub async fn gpodder_set_default_device_by_name(&self, user_id: i32, device_name: &str) -> AppResult { + println!("Setting default device: user_id={}, device_name='{}'", user_id, device_name); - // Get remote devices if sync is configured - if let Some(sync_settings) = self.get_user_sync_settings(user_id).await? 
{ - if sync_settings.sync_type != "None" && sync_settings.sync_type != "nextcloud" { - let remote_devices = self.fetch_remote_devices(&sync_settings).await.unwrap_or_default(); - - // Merge remote devices with local, handling conflicts - for remote_device in remote_devices { - let existing_local = local_devices.iter().find(|d| - d["name"].as_str() == Some(&remote_device.device_name) - ); - - if existing_local.is_none() { - // Add remote device with negative ID to indicate it's remote - local_devices.push(serde_json::json!({ - "id": -(remote_device.device_id.abs()), // Negative for remote - "name": remote_device.device_name, - "type": remote_device.device_type, - "caption": remote_device.device_caption, - "last_sync": Option::::None, - "is_active": true, - "is_default": false, // Remote devices are never default locally - "is_remote": true - })); - } - } - } - } + // Check sync type to determine which tables to update + let sync_settings = self.get_user_sync_settings(user_id).await?; + let is_external = sync_settings + .as_ref() + .map(|s| s.sync_type == "external") + .unwrap_or(false); - Ok(local_devices) - } - - // Get local devices only - internal helper - async fn get_local_devices(&self, user_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT deviceid, devicename, devicetype, isdefault FROM "GpodderDevices" WHERE userid = $1 ORDER BY devicename"#) - .bind(user_id) - .fetch_all(pool) + if !is_external { + // For internal/both: update GpodderDevices table + let clear_result = sqlx::query(r#"UPDATE "GpodderDevices" SET isdefault = false WHERE userid = $1"#) + .bind(user_id) + .execute(pool) + .await?; + println!("Cleared {} devices for user {}", clear_result.rows_affected(), user_id); + + let result = sqlx::query(r#"UPDATE "GpodderDevices" SET isdefault = true WHERE devicename = $1 AND userid = $2"#) + .bind(device_name) + .bind(user_id) + .execute(pool) + .await?; + println!("Set default result: {} rows 
affected", result.rows_affected()); + } + + // Always update Users table (for both internal and external) + sqlx::query(r#"UPDATE "Users" SET defaultgpodderdevice = $1 WHERE userid = $2"#) + .bind(device_name) + .bind(user_id) + .execute(pool) + .await?; + println!("Updated Users table with default device"); + + Ok(true) + } + DatabasePool::MySQL(pool) => { + if !is_external { + // For internal/both: update GpodderDevices table + let clear_result = sqlx::query("UPDATE GpodderDevices SET IsDefault = false WHERE UserID = ?") + .bind(user_id) + .execute(pool) + .await?; + println!("Cleared {} devices for user {}", clear_result.rows_affected(), user_id); + + let result = sqlx::query("UPDATE GpodderDevices SET IsDefault = true WHERE DeviceName = ? AND UserID = ?") + .bind(device_name) + .bind(user_id) + .execute(pool) + .await?; + println!("Set default result: {} rows affected", result.rows_affected()); + } + + // Always update Users table (for both internal and external) + sqlx::query("UPDATE Users SET DefaultGpodderDevice = ? WHERE UserID = ?") + .bind(device_name) + .bind(user_id) + .execute(pool) + .await?; + println!("Updated Users table with default device"); + + Ok(true) + } + } + } + + // Get gPodder devices for user - matches Python get_devices function exactly with remote device support + pub async fn gpodder_get_user_devices(&self, user_id: i32) -> AppResult> { + // Check what type of sync is configured + if let Some(sync_settings) = self.get_user_sync_settings(user_id).await? 
{ + match sync_settings.sync_type.as_str() { + "gpodder" | "external" => { + // Both internal and external use HTTP API calls to GPodder server + // Internal: http://localhost:8042, External: user's configured URL + let mut devices = self.fetch_devices_from_gpodder_api(&sync_settings).await?; + + // Get the default device name from Users table to mark the correct device + let default_device_name = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT defaultgpodderdevice FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("defaultgpodderdevice").ok().flatten()) + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT DefaultGpodderDevice FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("DefaultGpodderDevice").ok().flatten()) + } + }; + + // Mark the default device + if let Some(default_name) = default_device_name { + for device in &mut devices { + if device.get("name").and_then(|v| v.as_str()) == Some(&default_name) { + device["is_default"] = serde_json::Value::Bool(true); + device["is_remote"] = serde_json::Value::Bool(false); // Default device is treated as local + break; + } + } + } + + Ok(devices) + } + "nextcloud" => { + // Nextcloud doesn't have device concept like GPodder + Ok(vec![]) + } + _ => { + // No sync configured - return empty list + Ok(vec![]) + } + } + } else { + // No sync settings found - return empty list + Ok(vec![]) + } + } + + // Get local devices only - internal helper + async fn get_local_devices(&self, user_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#"SELECT deviceid, devicename, devicetype, isdefault FROM "GpodderDevices" WHERE userid = $1 ORDER BY devicename"#) + .bind(user_id) + .fetch_all(pool) .await?; let mut devices = Vec::new(); @@ -10129,46 +11373,70 @@ impl DatabasePool { } } - // Fetch 
remote devices from gPodder server - matches Python remote device fetching - async fn fetch_remote_devices(&self, settings: &UserSyncSettings) -> AppResult> { - let session = self.create_gpodder_session(settings).await?; + // Fetch devices from GPodder API server - works for both internal and external + async fn fetch_devices_from_gpodder_api(&self, settings: &UserSyncSettings) -> AppResult> { + // For internal GPodder API, use X-GPodder-Token header + // For external GPodder API, use session auth with basic auth fallback + let (client, auth_headers) = if settings.url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token + let client = reqwest::Client::new(); + let mut headers = reqwest::header::HeaderMap::new(); + headers.insert("X-GPodder-Token", settings.token.parse().unwrap()); + (client, Some(headers)) + } else { + // External GPodder API - decrypt password and use session auth + let decrypted_password = self.decrypt_password(&settings.token).await?; + let session = self.create_gpodder_session_with_password(&settings.url, &settings.username, &decrypted_password).await?; + (session.client, None) + }; let devices_url = format!("{}/api/2/devices/{}.json", settings.url.trim_end_matches('/'), settings.username); - let response = session.client - .get(&devices_url) + let mut request = client.get(&devices_url); + + // Add authentication headers if needed + if let Some(headers) = auth_headers { + request = request.headers(headers); + } else { + // External server with session auth - if session failed, fall back to basic auth + let decrypted_password = self.decrypt_password(&settings.token).await?; + request = request.basic_auth(&settings.username, Some(&decrypted_password)); + } + + let response = request .send() .await - .map_err(|e| AppError::internal(&format!("Failed to fetch remote devices: {}", e)))?; + .map_err(|e| AppError::internal(&format!("Failed to fetch devices from GPodder API: {}", e)))?; if response.status().is_success() { let 
devices_data: serde_json::Value = response.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse remote devices: {}", e)))?; + .map_err(|e| AppError::internal(&format!("Failed to parse devices from GPodder API: {}", e)))?; - let mut remote_devices = Vec::new(); + let mut devices = Vec::new(); if let Some(devices_array) = devices_data.as_array() { - for (index, device) in devices_array.iter().enumerate() { - if let (Some(device_name), Some(device_type)) = ( - device["id"].as_str(), - device["type"].as_str() - ) { - remote_devices.push(GpodderDevice { - device_id: -(index as i32 + 1000), // Negative ID for remote devices - device_name: device_name.to_string(), - device_type: device_type.to_string(), - device_caption: device["caption"].as_str().map(|s| s.to_string()), - is_default: false, - is_remote: true, - user_id: 0, // Remote devices don't have local user_id - }); + for device in devices_array.iter() { + if let Some(device_id) = device["id"].as_str() { + // Convert GPodder API response to Pinepods device format + devices.push(serde_json::json!({ + "id": device_id, // Use actual GPodder device ID (string) + "name": device_id, + "type": device["type"].as_str().unwrap_or("other"), + "caption": device["caption"].as_str().unwrap_or(device_id), + "last_sync": Option::::None, + "is_active": true, + "is_default": false, // GPodder devices are not local defaults + "is_remote": true, + "subscriptions": device["subscriptions"].as_i64().unwrap_or(0) + })); } } } - Ok(remote_devices) + Ok(devices) } else { + tracing::warn!("Failed to fetch devices from GPodder API: {}", response.status()); Ok(Vec::new()) } } @@ -10215,6 +11483,42 @@ impl DatabasePool { }) } + // Create gPodder session with already-decrypted password (avoids double decryption) + async fn create_gpodder_session_with_password(&self, gpodder_url: &str, username: &str, password: &str) -> AppResult { + let jar = std::sync::Arc::new(reqwest::cookie::Jar::default()); + let client = 
reqwest::Client::builder() + .cookie_provider(jar) + .build() + .map_err(|e| AppError::internal(&format!("Failed to create HTTP client: {}", e)))?; + + // Try session-based authentication first - matches Python login flow + let login_url = format!("{}/api/2/auth/{}/login.json", + gpodder_url.trim_end_matches('/'), username); + + let login_response = client + .post(&login_url) + .basic_auth(username, Some(password)) + .send() + .await; + + let session_authenticated = match login_response { + Ok(response) if response.status().is_success() => { + tracing::info!("gPodder session authentication successful"); + true + } + _ => { + tracing::warn!("gPodder session authentication failed, will use basic auth"); + false + } + }; + + Ok(GpodderSession { + client, + session_id: None, + authenticated: session_authenticated, + }) + } + // gPodder force sync - calls Go gPodder service exactly like Python force_full_sync_to_gpodder pub async fn gpodder_force_sync(&self, user_id: i32) -> AppResult { let sync_settings = self.get_user_sync_settings(user_id).await?; @@ -10227,7 +11531,7 @@ impl DatabasePool { match settings.sync_type.as_str() { "gpodder" => { - // Internal gPodder API on localhost:8042 - use encrypted token directly + // Internal gPodder API on localhost:8042 - use unencrypted token directly self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, true).await } "nextcloud" => { @@ -10258,1556 +11562,2715 @@ impl DatabasePool { let settings = sync_settings.unwrap(); let device_name = self.get_or_create_default_device(user_id).await?; - match settings.sync_type.as_str() { + let success = match settings.sync_type.as_str() { "gpodder" => { - self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await?; - Ok(SyncResult { synced_podcasts: 1, synced_episodes: 0 }) + self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, 
&settings.token, &device_name, false).await? } "nextcloud" => { - self.sync_with_nextcloud(user_id, &settings, false).await?; - Ok(SyncResult { synced_podcasts: 1, synced_episodes: 0 }) + self.sync_with_nextcloud(user_id, &settings, false).await? } "external" => { let decrypted_token = self.decrypt_password(&settings.token).await?; - self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await?; - Ok(SyncResult { synced_podcasts: 1, synced_episodes: 0 }) + self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await? } "both" => { - self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await?; + let internal_success = self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await?; let decrypted_token = self.decrypt_password(&settings.token).await?; - self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await?; - Ok(SyncResult { synced_podcasts: 2, synced_episodes: 0 }) + let external_success = self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await?; + internal_success || external_success } - _ => Ok(SyncResult { synced_podcasts: 0, synced_episodes: 0 }) - } + _ => false + }; + + Ok(SyncResult { + synced_podcasts: if success { 1 } else { 0 }, + synced_episodes: 0 + }) } // Call gPodder service for sync - matches Python API calls exactly with enhanced error handling async fn call_gpodder_service_sync(&self, user_id: i32, gpodder_url: &str, username: &str, password: &str, device_name: &str, force: bool) -> AppResult { - let session = self.create_gpodder_session(&UserSyncSettings { - url: gpodder_url.to_string(), - username: username.to_string(), - token: password.to_string(), - sync_type: if 
gpodder_url == "http://localhost:8042" { "gpodder".to_string() } else { "external".to_string() }, - }).await?; - - // Step 1: Get subscriptions from gPodder service with session or basic auth fallback - let subscriptions_url = format!("{}/api/2/subscriptions/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); - - let response = if session.authenticated { - session.client - .get(&subscriptions_url) + // Step 1: Get ALL devices first (critical for detecting changes from external devices like AntennaPod) + let devices_url = format!("{}/api/2/devices/{}.json", + gpodder_url.trim_end_matches('/'), username); + + // Use correct authentication based on internal vs external + let devices_response = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + client.get(&devices_url) + .header("X-GPodder-Token", password) .send() .await } else { - session.client - .get(&subscriptions_url) - .basic_auth(username, Some(password)) - .send() - .await + // External GPodder API - use session auth with basic fallback + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client + .get(&devices_url) + .send() + .await + } else { + session.client + .get(&devices_url) + .basic_auth(username, Some(password)) + .send() + .await + } }; - match response { + let devices = match devices_response { Ok(resp) if resp.status().is_success() => { - let subscriptions: Vec = resp.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse gPodder subscriptions: {}", e)))?; - - tracing::info!("Downloaded {} subscriptions from gPodder service", subscriptions.len()); + let devices_data: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse devices: {}", e)))?; - // Step 2: Process subscriptions and add missing podcasts - self.process_gpodder_subscriptions(user_id, 
&subscriptions).await?; - - // Step 3: Upload local subscriptions to gPodder service - let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; - self.upload_subscriptions_to_gpodder_with_session(&session, gpodder_url, username, password, device_name, &local_subscriptions).await?; - - // Step 4: Sync episode actions (listening progress) with enhanced error handling - if let Err(e) = self.sync_episode_actions_with_gpodder(gpodder_url, username, password, device_name, user_id).await { - tracing::warn!("Episode actions sync failed but continuing: {}", e); - // Don't fail the entire sync if episode actions fail + if let Some(devices_array) = devices_data.as_array() { + let device_names: Vec = devices_array.iter() + .filter_map(|device| device.get("id").and_then(|v| v.as_str()).map(|s| s.to_string())) + .collect(); + + tracing::info!("Found {} devices for user: {:?}", device_names.len(), device_names); + device_names + } else { + tracing::warn!("No devices found for user"); + vec![] } - - // Step 5: Update last sync timestamp for next incremental sync (BETTER than Python) - self.update_last_sync_timestamp(user_id).await?; - - Ok(true) } Ok(resp) => { - tracing::warn!("gPodder service returned error: {}", resp.status()); + tracing::warn!("Failed to get devices: {}", resp.status()); if force { - // For force sync, treat non-success as failure - Err(AppError::internal(&format!("Force sync failed with status: {}", resp.status()))) + return Err(AppError::internal(&format!("Force sync failed to get devices: {}", resp.status()))); } else { - // For regular sync, continue gracefully - Ok(false) + return Ok(false); } } Err(e) => { - tracing::error!("Failed to connect to gPodder service: {}", e); + tracing::error!("Failed to connect to get devices: {}", e); if force { - Err(AppError::internal(&format!("Force sync connection failed: {}", e))) + return Err(AppError::internal(&format!("Force sync connection failed: {}", e))); } else { - Ok(false) + return Ok(false); } 
} - } - } - - // Upload subscriptions with session support and error handling - async fn upload_subscriptions_to_gpodder_with_session(&self, session: &GpodderSession, gpodder_url: &str, username: &str, password: &str, device_name: &str, subscriptions: &[String]) -> AppResult<()> { - let upload_url = format!("{}/api/2/subscriptions/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); - - let response = if session.authenticated { - session.client - .put(&upload_url) - .json(subscriptions) - .send() - .await - } else { - session.client - .put(&upload_url) - .basic_auth(username, Some(password)) - .json(subscriptions) - .send() - .await }; - match response { - Ok(resp) if resp.status().is_success() => { - tracing::info!("Successfully uploaded {} subscriptions to gPodder service", subscriptions.len()); - Ok(()) - } - Ok(resp) => { - tracing::warn!("Failed to upload subscriptions: {}", resp.status()); - // Don't fail sync for upload failures - log and continue - Ok(()) - } - Err(e) => { - tracing::warn!("Error uploading subscriptions: {}", e); - // Don't fail sync for upload failures - log and continue - Ok(()) - } - } - } - - // Process gPodder subscriptions and add missing podcasts - async fn process_gpodder_subscriptions(&self, user_id: i32, subscriptions: &[String]) -> AppResult<()> { - for feed_url in subscriptions { - let exists = self.podcast_exists_for_user(user_id, feed_url).await?; - if !exists { - // Add new podcast - this would call the actual add_podcast function - tracing::info!("Would add podcast {} for user {}", feed_url, user_id); - } - } - Ok(()) - } - - // Upload local subscriptions to gPodder service - matches Python PUT /api/2/subscriptions/{username}/{device}.json - async fn upload_subscriptions_to_gpodder(&self, gpodder_url: &str, username: &str, password: &str, device_name: &str, subscriptions: &[String]) -> AppResult<()> { - let client = reqwest::Client::new(); - let upload_url = format!("{}/api/2/subscriptions/{}/{}.json", 
gpodder_url.trim_end_matches('/'), username, device_name); - - let response = client - .put(&upload_url) - .basic_auth(username, Some(password)) - .json(subscriptions) - .send() - .await - .map_err(|e| AppError::internal(&format!("Failed to upload subscriptions: {}", e)))?; - - if !response.status().is_success() { - tracing::warn!("Failed to upload subscriptions to gPodder service: {}", response.status()); - } - - Ok(()) - } - - // Sync episode actions with gPodder service - matches Python episode actions sync with timestamp support - async fn sync_episode_actions_with_gpodder(&self, gpodder_url: &str, username: &str, password: &str, _device_name: &str, user_id: i32) -> AppResult<()> { - let session = self.create_gpodder_session(&UserSyncSettings { - url: gpodder_url.to_string(), - username: username.to_string(), - token: password.to_string(), - sync_type: "external".to_string(), - }).await?; + tracing::info!("Found {} devices to sync from: {:?}", devices.len(), devices); - // Get last sync timestamp for incremental sync (BETTER than Python - follows GPodder spec) + // Step 2: Get subscriptions from ALL devices with timestamps (like AntennaPod sync) let since_timestamp = self.get_last_sync_timestamp(user_id).await?; + let mut all_subscriptions = std::collections::HashSet::new(); + let mut all_removals = std::collections::HashSet::new(); - // Get local episode actions since last sync for efficient incremental sync - let local_actions = if let Some(since) = since_timestamp { - self.get_user_episode_actions_since(user_id, since).await? - } else { - self.get_user_episode_actions(user_id).await? 
- }; - - // Upload local actions to gPodder service - matches Python POST /api/2/episodes/{username}.json - if !local_actions.is_empty() { - let upload_url = format!("{}/api/2/episodes/{}.json", gpodder_url.trim_end_matches('/'), username); + for device_id in &devices { + tracing::info!("Getting subscriptions from device: {}", device_id); - let response = if session.authenticated { - // Use session-based authentication - session.client - .post(&upload_url) - .json(&local_actions) - .send() - .await + let subscriptions_url = if let Some(since) = since_timestamp { + format!("{}/api/2/subscriptions/{}/{}.json?since={}", + gpodder_url.trim_end_matches('/'), username, device_id, since.timestamp()) } else { - // Fallback to basic auth - session.client - .post(&upload_url) - .basic_auth(username, Some(password)) - .json(&local_actions) + format!("{}/api/2/subscriptions/{}/{}.json?since=0", + gpodder_url.trim_end_matches('/'), username, device_id) + }; + + let device_response = if gpodder_url == "http://localhost:8042" { + let client = reqwest::Client::new(); + client.get(&subscriptions_url) + .header("X-GPodder-Token", password) .send() .await + } else { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client + .get(&subscriptions_url) + .send() + .await + } else { + session.client + .get(&subscriptions_url) + .basic_auth(username, Some(password)) + .send() + .await + } }; - match response { + match device_response { Ok(resp) if resp.status().is_success() => { - tracing::info!("Successfully uploaded {} episode actions", local_actions.len()); + let response_text = resp.text().await + .map_err(|e| AppError::internal(&format!("Failed to get response text: {}", e)))?; + + let sync_response: serde_json::Value = serde_json::from_str(&response_text) + .map_err(|e| AppError::internal(&format!("Failed to parse gPodder sync response: {}", e)))?; + + let device_subscriptions = 
sync_response["add"].as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect::>(); + + let device_removals = sync_response["remove"].as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect::>(); + + tracing::info!("Device {} has {} subscriptions and {} removals", device_id, device_subscriptions.len(), device_removals.len()); + + // Add to combined sets (HashSet automatically deduplicates) + for subscription in device_subscriptions { + all_subscriptions.insert(subscription); + } + + for removal in device_removals { + all_removals.insert(removal); + } } Ok(resp) => { - tracing::warn!("Failed to upload episode actions: {}", resp.status()); + tracing::warn!("Device {} returned error: {}", device_id, resp.status()); + // Continue with other devices instead of failing } Err(e) => { - return Err(AppError::internal(&format!("Failed to upload episode actions: {}", e))); + tracing::warn!("Failed to get subscriptions from device {}: {}", device_id, e); + // Continue with other devices instead of failing } } } - // Download remote actions from gPodder service with timestamp support - let mut download_url = format!("{}/api/2/episodes/{}.json", gpodder_url.trim_end_matches('/'), username); - - // Add since parameter for incremental sync - if let Some(since) = since_timestamp { - download_url = format!("{}?since={}", download_url, since.timestamp()); - } + tracing::info!("Downloaded {} unique subscriptions and {} unique removals from ALL devices", all_subscriptions.len(), all_removals.len()); - let response = if session.authenticated { - session.client - .get(&download_url) - .send() - .await + // Step 3: Get episode actions with pagination support + // DON'T filter by device - this would exclude actions with NULL DeviceID + // The server limits responses to 25k actions, so we need to loop until we get all of them + let mut all_episode_actions = Vec::new(); + const 
MAX_ACTIONS_PER_BATCH: usize = 25000; + + tracing::info!("Getting ALL episode actions (no device filter to include NULL device actions)"); + + let initial_since = if let Some(since) = since_timestamp { + since.timestamp() } else { - session.client - .get(&download_url) - .basic_auth(username, Some(password)) - .send() - .await + 0 }; - - match response { - Ok(resp) if resp.status().is_success() => { - let episode_response: serde_json::Value = resp.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse episode actions response: {}", e)))?; - - let remote_actions = episode_response.get("actions") - .and_then(|v| v.as_array()) - .cloned() - .unwrap_or_default(); - - tracing::info!("Downloaded {} remote episode actions", remote_actions.len()); - - // Apply remote actions locally - self.apply_remote_episode_actions(user_id, &remote_actions).await?; - - // Update last sync timestamp for incremental sync (BETTER than Python) - self.update_last_sync_timestamp(user_id).await?; + + let mut current_since = initial_since; + let mut total_actions_fetched = 0; + + loop { + // Fetch WITHOUT device parameter to get ALL actions including those with NULL DeviceID + let episode_actions_url = format!("{}/api/2/episodes/{}.json?since={}", + gpodder_url.trim_end_matches('/'), username, current_since); + + let device_response = if gpodder_url == "http://localhost:8042" { + let client = reqwest::Client::new(); + client.get(&episode_actions_url) + .header("X-GPodder-Token", password) + .send() + .await + } else { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client + .get(&episode_actions_url) + .send() + .await + } else { + session.client + .get(&episode_actions_url) + .basic_auth(username, Some(password)) + .send() + .await + } + }; + + match device_response { + Ok(resp) if resp.status().is_success() => { + let response_text = resp.text().await + .map_err(|e| 
AppError::internal(&format!("Failed to get episode actions response text: {}", e)))?; + + let episode_data: serde_json::Value = serde_json::from_str(&response_text) + .map_err(|e| AppError::internal(&format!("Failed to parse episode actions response: {}", e)))?; + + let actions = episode_data.get("actions").and_then(|v| v.as_array()).cloned().unwrap_or_default(); + let batch_size = actions.len(); + let new_timestamp = episode_data.get("timestamp").and_then(|v| v.as_i64()).unwrap_or(current_since); + + tracing::info!("Fetched {} episode actions (since={})", batch_size, current_since); + + // Add actions from this batch + for action in actions { + all_episode_actions.push(action); + } + total_actions_fetched += batch_size; + + // If we got less than MAX_ACTIONS_PER_BATCH, we've reached the end + if batch_size < MAX_ACTIONS_PER_BATCH { + tracing::info!("Reached end of episode actions (got {} < {} limit)", batch_size, MAX_ACTIONS_PER_BATCH); + break; + } + + // Update since to the timestamp from the response for next iteration + if new_timestamp > current_since { + current_since = new_timestamp; + tracing::info!("Updated since timestamp to {} for next batch", current_since); + } else { + tracing::warn!("Timestamp didn't advance, stopping pagination to avoid infinite loop"); + break; + } + } + Ok(resp) => { + tracing::warn!("Episode actions request returned error: {}", resp.status()); + break; + } + Err(e) => { + tracing::warn!("Failed to get episode actions: {}", e); + break; + } } - Ok(resp) => { - tracing::warn!("Failed to download episode actions: {}", resp.status()); + } + + tracing::info!("Total: {} episode actions fetched", total_actions_fetched); + + println!("\n========== FORCE SYNC EPISODE ACTIONS DOWNLOAD COMPLETE =========="); + println!("📊 Downloaded {} total episode actions from ALL {} devices", all_episode_actions.len(), devices.len()); + + // Check if changelog-news-168 is in the downloaded actions + let has_changelog_168 = 
all_episode_actions.iter().any(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + .map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }); + + if has_changelog_168 { + println!("🎯 FOUND changelog-news-168 in downloaded episode actions!"); + // Find which device it came from + if let Some(action) = all_episode_actions.iter().find(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + .map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }) { + println!(" Device: {}", action.get("device").and_then(|d| d.as_str()).unwrap_or("unknown")); + println!(" Episode URL: {}", action.get("episode").and_then(|e| e.as_str()).unwrap_or("unknown")); + println!(" Position: {}", action.get("position").and_then(|p| p.as_i64()).unwrap_or(0)); + println!(" Timestamp: {}", action.get("timestamp").and_then(|t| t.as_i64()).unwrap_or(0)); } - Err(e) => { - return Err(AppError::internal(&format!("Failed to download episode actions: {}", e))); + } else { + println!("❌ changelog-news-168 NOT FOUND in downloaded episode actions!"); + println!(" This means the GPodder API never returned it across any device."); + } + println!("==================================================================\n"); + + // Step 4: Process all subscriptions (additions) + let subscriptions_vec: Vec = all_subscriptions.into_iter().collect(); + self.process_gpodder_subscriptions(user_id, &subscriptions_vec).await?; + + // Step 5: Process all subscription removals + let removals_vec: Vec = all_removals.into_iter().collect(); + if !removals_vec.is_empty() { + self.process_gpodder_subscription_removals(user_id, &removals_vec).await?; + } + + // Step 6: Process all episode actions + if !all_episode_actions.is_empty() { + if let Err(e) = self.apply_remote_episode_actions(user_id, &all_episode_actions).await { + tracing::warn!("Episode actions processing failed but continuing: {}", e); } } - Ok(()) + // Step 7: Upload local subscriptions to gPodder service (to default device) + if 
since_timestamp.is_none() || !subscriptions_vec.is_empty() || !removals_vec.is_empty() { + let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; + self.upload_subscriptions_to_gpodder(gpodder_url, username, password, device_name, &local_subscriptions).await?; + } else { + tracing::info!("Skipping subscription upload - no changes detected in incremental sync"); + } + + // Step 8: Upload local episode actions to gPodder service (to default device) + if let Err(e) = self.sync_episode_actions_with_gpodder(gpodder_url, username, password, device_name, user_id).await { + tracing::warn!("Episode actions sync failed but continuing: {}", e); + } + + // Step 9: Update last sync timestamp for next incremental sync + self.update_last_sync_timestamp(user_id).await?; + + Ok(true) } - // Get user podcast feeds for sync - async fn get_user_podcast_feeds(&self, user_id: i32) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT feedurl FROM "Podcasts" WHERE userid = $1"#) - .bind(user_id) - .fetch_all(pool) - .await?; + // Initial full sync for GPodder - gets ALL user subscriptions from ALL devices + pub async fn call_gpodder_initial_full_sync(&self, user_id: i32, gpodder_url: &str, username: &str, password: &str, device_name: &str) -> AppResult { + tracing::info!("Starting initial full GPodder sync for user {} from {}", user_id, gpodder_url); + + // Step 1: Get ALL devices first (this is how AntennaPod and other apps do it) + let devices_url = format!("{}/api/2/devices/{}.json", + gpodder_url.trim_end_matches('/'), username); + + let devices_response = if gpodder_url == "http://localhost:8042" { + let client = reqwest::Client::new(); + client.get(&devices_url).header("X-GPodder-Token", password).send().await + } else { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client.get(&devices_url).send().await + } else { + 
session.client.get(&devices_url).basic_auth(username, Some(password)).send().await + } + }; + + let devices = match devices_response { + Ok(resp) if resp.status().is_success() => { + let devices_data: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse devices: {}", e)))?; - let mut feeds = Vec::new(); - for row in rows { - feeds.push(row.try_get::("feedurl")?); + if let Some(devices_array) = devices_data.as_array() { + let device_names: Vec = devices_array.iter() + .filter_map(|device| device.get("id").and_then(|v| v.as_str()).map(|s| s.to_string())) + .collect(); + + tracing::info!("Found {} devices for user: {:?}", device_names.len(), device_names); + device_names + } else { + tracing::warn!("No devices found for user"); + vec![] } - Ok(feeds) } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT FeedURL FROM Podcasts WHERE UserID = ?") - .bind(user_id) - .fetch_all(pool) - .await?; - - let mut feeds = Vec::new(); - for row in rows { - feeds.push(row.try_get::("FeedURL")?); + Ok(resp) => { + tracing::error!("Failed to get devices: {}", resp.status()); + return Ok(false); + } + Err(e) => { + tracing::error!("Failed to connect for devices: {}", e); + return Ok(false); + } + }; + + // Step 2: Get subscriptions from ALL devices (like AntennaPod does) + let mut all_subscriptions = std::collections::HashSet::new(); + + for device_id in &devices { + tracing::info!("Getting subscriptions from device: {}", device_id); + + let subscriptions_url = format!("{}/api/2/subscriptions/{}/{}.json?since=0", + gpodder_url.trim_end_matches('/'), username, device_id); + + let response = if gpodder_url == "http://localhost:8042" { + let client = reqwest::Client::new(); + client.get(&subscriptions_url).header("X-GPodder-Token", password).send().await + } else { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + 
session.client.get(&subscriptions_url).send().await + } else { + session.client.get(&subscriptions_url).basic_auth(username, Some(password)).send().await + } + }; + + match response { + Ok(resp) if resp.status().is_success() => { + let response_text = resp.text().await + .map_err(|e| AppError::internal(&format!("Failed to get response text: {}", e)))?; + + let sync_response: serde_json::Value = serde_json::from_str(&response_text) + .map_err(|e| AppError::internal(&format!("Failed to parse gPodder sync response: {}", e)))?; + + let device_subscriptions = sync_response["add"].as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|v| v.as_str().map(|s| s.to_string())) + .collect::>(); + + tracing::info!("Device {} has {} subscriptions", device_id, device_subscriptions.len()); + + // Add all subscriptions to our set (deduplicates automatically) + for subscription in device_subscriptions { + all_subscriptions.insert(subscription); + } + } + Ok(resp) => { + tracing::warn!("Failed to get subscriptions from device {}: {}", device_id, resp.status()); + // Continue with other devices + } + Err(e) => { + tracing::warn!("Error getting subscriptions from device {}: {}", device_id, e); + // Continue with other devices } - Ok(feeds) } } - } + + let all_subscriptions: Vec = all_subscriptions.into_iter().collect(); + tracing::info!("Total unique subscriptions from all devices: {}", all_subscriptions.len()); + + // Step 2: Get episode actions from ALL devices with pagination support + // The server limits responses to 25k actions, so we need to loop until we get all of them + let mut all_episode_actions = Vec::new(); + const MAX_ACTIONS_PER_BATCH: usize = 25000; - // Apply remote episode actions locally - matches Python apply_episode_actions function exactly - async fn apply_remote_episode_actions(&self, user_id: i32, actions: &[serde_json::Value]) -> AppResult<()> { - for action in actions { - if let (Some(episode_url), Some(action_type)) = ( - action["episode"].as_str(), - 
action["action"].as_str() - ) { - match action_type { - "play" => { - if let (Some(position), Some(timestamp_str)) = ( - action["position"].as_i64(), - action["timestamp"].as_str() - ) { - // Find local episode by URL - if let Some(episode_id) = self.find_episode_by_url(user_id, episode_url).await? { - // Parse timestamp - if let Ok(timestamp) = chrono::DateTime::parse_from_rfc3339(timestamp_str) { - self.update_episode_progress(user_id, episode_id, position as i32, timestamp.naive_utc()).await?; - } - } - } + // DON'T filter by device for initial sync - this would exclude actions with NULL DeviceID + // The GPodder API filters WHERE DeviceID = X, which excludes NULL device actions + // For full sync, we want ALL actions regardless of device + tracing::info!("Getting ALL episode actions (not filtering by device to include NULL device actions)"); + + let mut current_since: i64 = 0; // Start from epoch + let mut total_actions_fetched = 0; + + loop { + // Fetch WITHOUT device parameter to get ALL actions including those with NULL DeviceID + let episode_actions_url = format!("{}/api/2/episodes/{}.json?since={}", + gpodder_url.trim_end_matches('/'), username, current_since); + + let response = if gpodder_url == "http://localhost:8042" { + let client = reqwest::Client::new(); + client.get(&episode_actions_url).header("X-GPodder-Token", password).send().await + } else { + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client.get(&episode_actions_url).send().await + } else { + session.client.get(&episode_actions_url).basic_auth(username, Some(password)).send().await } - "download" => { - // Handle download actions if needed - tracing::info!("Download action for episode: {}", episode_url); + }; + + match response { + Ok(resp) if resp.status().is_success() => { + let episode_data: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse episode 
actions: {}", e)))?; + + let actions = episode_data.get("actions").and_then(|v| v.as_array()).cloned().unwrap_or_default(); + let batch_size = actions.len(); + let new_timestamp = episode_data.get("timestamp").and_then(|v| v.as_i64()).unwrap_or(current_since); + + tracing::info!("Fetched {} episode actions (since={})", batch_size, current_since); + + // Add actions from this batch + for action in actions { + all_episode_actions.push(action); } - "delete" => { - // Handle delete actions if needed - tracing::info!("Delete action for episode: {}", episode_url); + total_actions_fetched += batch_size; + + // If we got less than MAX_ACTIONS_PER_BATCH, we've reached the end + if batch_size < MAX_ACTIONS_PER_BATCH { + tracing::info!("Reached end of episode actions (got {} < {} limit)", batch_size, MAX_ACTIONS_PER_BATCH); + break; } - _ => { - tracing::warn!("Unknown action type: {}", action_type); + + // Update since to the timestamp from the response for next iteration + if new_timestamp > current_since { + current_since = new_timestamp; + tracing::info!("Updated since timestamp to {} for next batch", current_since); + } else { + tracing::warn!("Timestamp didn't advance, stopping pagination to avoid infinite loop"); + break; } } + Ok(resp) => { + tracing::warn!("Failed to get episode actions: {}", resp.status()); + break; + } + Err(e) => { + tracing::warn!("Error getting episode actions: {}", e); + break; + } } } - Ok(()) + + tracing::info!("Total: {} episode actions fetched", total_actions_fetched); + + println!("\n========== INITIAL SYNC EPISODE ACTIONS DOWNLOAD COMPLETE =========="); + println!("📊 Downloaded {} total episode actions from ALL {} devices", all_episode_actions.len(), devices.len()); + + // Check if changelog-news-168 is in the downloaded actions + let has_changelog_168 = all_episode_actions.iter().any(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + .map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }); + + if has_changelog_168 { + 
println!("🎯 FOUND changelog-news-168 in downloaded episode actions!"); + // Find which device it came from + if let Some(action) = all_episode_actions.iter().find(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + .map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }) { + println!(" Device: {}", action.get("device").and_then(|d| d.as_str()).unwrap_or("unknown")); + println!(" Episode URL: {}", action.get("episode").and_then(|e| e.as_str()).unwrap_or("unknown")); + println!(" Position: {}", action.get("position").and_then(|p| p.as_i64()).unwrap_or(0)); + println!(" Timestamp: {}", action.get("timestamp").and_then(|t| t.as_i64()).unwrap_or(0)); + } + } else { + println!("❌ changelog-news-168 NOT FOUND in downloaded episode actions!"); + println!(" This means the GPodder API never returned it across any device."); + } + println!("====================================================================\n"); + + // Step 3: Process all subscriptions and add missing podcasts FIRST + // This ensures podcasts and their episodes are in the database before applying episode actions + self.process_gpodder_subscriptions(user_id, &all_subscriptions).await?; + + // Process all episode actions and apply them locally AFTER subscriptions are processed + if !all_episode_actions.is_empty() { + if let Err(e) = self.apply_remote_episode_actions(user_id, &all_episode_actions).await { + tracing::warn!("Failed to apply remote episode actions: {}", e); + } + } + + // Step 4: Upload local subscriptions to GPodder service + let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; + self.upload_subscriptions_to_gpodder(gpodder_url, username, password, device_name, &local_subscriptions).await?; + + // Step 5: Upload local episode actions to GPodder service + let local_episode_actions = self.get_user_episode_actions(user_id).await?; + if !local_episode_actions.is_empty() { + if let Err(e) = self.upload_episode_actions_to_gpodder(gpodder_url, username, password, 
&local_episode_actions).await { + tracing::warn!("Failed to upload local episode actions to GPodder: {}", e); + // Don't fail the sync if episode actions upload fails + } + } + + // Step 5: Clear any existing sync timestamp to start fresh for incremental syncs + self.clear_last_sync_timestamp(user_id).await?; + + tracing::info!("Initial full GPodder sync completed for user {}", user_id); + Ok(true) } - // Find episode ID by URL for user - async fn find_episode_by_url(&self, user_id: i32, episode_url: &str) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE e.episodeurl = $1 AND p.userid = $2 - "#) - .bind(episode_url) - .bind(user_id) - .fetch_optional(pool) - .await?; + // Initial full sync for Nextcloud - gets ALL user subscriptions + pub async fn call_nextcloud_initial_full_sync(&self, user_id: i32, nextcloud_url: &str, username: &str, password: &str) -> AppResult { + tracing::info!("Starting initial full Nextcloud sync for user {} from {}", user_id, nextcloud_url); + + let client = reqwest::Client::new(); + + // Get ALL subscriptions from Nextcloud gPodder Sync app + let subscriptions_url = format!("{}/index.php/apps/gpoddersync/subscriptions", nextcloud_url.trim_end_matches('/')); + + let response = client + .get(&subscriptions_url) + .basic_auth(username, Some(password)) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to get Nextcloud subscriptions: {}", e)))?; + + if !response.status().is_success() { + tracing::error!("Failed to get Nextcloud subscriptions: {}", response.status()); + return Ok(false); + } + + let subscriptions: serde_json::Value = response.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud subscriptions: {}", e)))?; + + // Process subscriptions - Nextcloud returns array of feed URLs + let feed_urls = if let Some(feeds) = subscriptions.as_array() { + 
let urls: Vec = feeds.iter() + .filter_map(|f| f.as_str().map(|s| s.to_string())) + .collect(); + + tracing::info!("Downloaded {} subscriptions from Nextcloud", urls.len()); + urls + } else { + tracing::warn!("No subscriptions found in Nextcloud response"); + vec![] + }; + + // Get ALL episode actions from Nextcloud + let episode_actions_url = format!("{}/index.php/apps/gpoddersync/episode_action", nextcloud_url.trim_end_matches('/')); + + let episode_response = client + .get(&episode_actions_url) + .basic_auth(username, Some(password)) + .send() + .await; + + let mut all_episode_actions = Vec::new(); + + match episode_response { + Ok(resp) if resp.status().is_success() => { + let episode_data: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud episode actions: {}", e)))?; - if let Some(row) = row { - Ok(Some(row.try_get("episodeid")?)) - } else { - Ok(None) + if let Some(actions) = episode_data.get("actions").and_then(|v| v.as_array()) { + tracing::info!("Downloaded {} episode actions from Nextcloud", actions.len()); + + // Add all episode actions + for action in actions { + all_episode_actions.push(action.clone()); + } } } - DatabasePool::MySQL(pool) => { - let row = sqlx::query(" - SELECT e.EpisodeID - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE e.EpisodeURL = ? AND p.UserID = ? 
- ") - .bind(episode_url) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - Ok(Some(row.try_get("EpisodeID")?)) - } else { - Ok(None) - } + Ok(resp) => { + tracing::warn!("Failed to get Nextcloud episode actions: {}", resp.status()); + // Continue even if episode actions fail + } + Err(e) => { + tracing::warn!("Error getting Nextcloud episode actions: {}", e); + // Continue even if episode actions fail + } + } + + // Process all episode actions and apply them locally + if !all_episode_actions.is_empty() { + if let Err(e) = self.apply_remote_episode_actions(user_id, &all_episode_actions).await { + tracing::warn!("Failed to apply remote episode actions from Nextcloud: {}", e); + } + } + + // Process all subscriptions and add missing podcasts + self.process_gpodder_subscriptions(user_id, &feed_urls).await?; + + // Upload local subscriptions to Nextcloud + let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; + self.upload_subscriptions_to_nextcloud(nextcloud_url, username, password, &local_subscriptions).await?; + + // Upload local episode actions to Nextcloud + let local_episode_actions = self.get_user_episode_actions(user_id).await?; + if !local_episode_actions.is_empty() { + if let Err(e) = self.upload_episode_actions_to_nextcloud(nextcloud_url, username, password, &local_episode_actions).await { + tracing::warn!("Failed to upload local episode actions to Nextcloud: {}", e); + // Don't fail the sync if episode actions upload fails } } + + // Clear any existing sync timestamp to start fresh for incremental syncs + self.clear_last_sync_timestamp(user_id).await?; + + tracing::info!("Initial full Nextcloud sync completed for user {}", user_id); + Ok(true) } - // Update episode progress from remote sync - async fn update_episode_progress(&self, user_id: i32, episode_id: i32, position: i32, timestamp: chrono::NaiveDateTime) -> AppResult<()> { - match self { - DatabasePool::Postgres(pool) => { - // Insert or update episode 
history - sqlx::query(r#" - INSERT INTO "UserEpisodeHistory" (userid, episodeid, listenduration, listendate) - VALUES ($1, $2, $3, $4) - ON CONFLICT (userid, episodeid) - DO UPDATE SET listenduration = GREATEST("UserEpisodeHistory".listenduration, $3), listendate = $4 - "#) - .bind(user_id) - .bind(episode_id) - .bind(position) - .bind(timestamp) - .execute(pool) - .await?; + // Upload episode actions to GPodder server for initial sync + async fn upload_episode_actions_to_gpodder(&self, gpodder_url: &str, username: &str, password: &str, episode_actions: &[serde_json::Value]) -> AppResult<()> { + let upload_url = format!("{}/api/2/episodes/{}.json", gpodder_url.trim_end_matches('/'), username); + + // Use correct authentication based on internal vs external + let response = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + client.post(&upload_url) + .header("X-GPodder-Token", password) + .json(episode_actions) + .send() + .await + } else { + // External GPodder API - use session auth with basic fallback + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + // Use session-based authentication + session.client + .post(&upload_url) + .json(episode_actions) + .send() + .await + } else { + // Fallback to basic auth + session.client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(episode_actions) + .send() + .await } - DatabasePool::MySQL(pool) => { - // Insert or update episode history - sqlx::query(" - INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDuration, ListenDate) - VALUES (?, ?, ?, ?) 
- ON DUPLICATE KEY UPDATE - ListenDuration = GREATEST(ListenDuration, VALUES(ListenDuration)), - ListenDate = VALUES(ListenDate) - ") - .bind(user_id) - .bind(episode_id) - .bind(position) - .bind(timestamp) - .execute(pool) - .await?; + }; + + match response { + Ok(resp) if resp.status().is_success() => { + tracing::info!("Successfully uploaded {} episode actions to GPodder", episode_actions.len()); + Ok(()) + } + Ok(resp) => { + tracing::warn!("Failed to upload episode actions to GPodder: {}", resp.status()); + Ok(()) // Don't fail the whole sync if upload fails + } + Err(e) => { + Err(AppError::internal(&format!("Failed to upload episode actions to GPodder: {}", e))) + } + } + } + + // Upload subscriptions to Nextcloud using the gPodder Sync app endpoint + async fn upload_subscriptions_to_nextcloud(&self, nextcloud_url: &str, username: &str, password: &str, subscriptions: &[String]) -> AppResult<()> { + let client = reqwest::Client::new(); + // Nextcloud gPodder Sync app uses the subscription_change endpoint + let upload_url = format!("{}/index.php/apps/gpoddersync/subscription_change/upload", nextcloud_url.trim_end_matches('/')); + + let response = client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(subscriptions) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to upload subscriptions to Nextcloud: {}", e)))?; + + if response.status().is_success() { + tracing::info!("Successfully uploaded {} subscriptions to Nextcloud", subscriptions.len()); + Ok(()) + } else { + tracing::warn!("Failed to upload subscriptions to Nextcloud: {}", response.status()); + Ok(()) // Don't fail the whole sync if upload fails + } + } + + // Upload episode actions to Nextcloud using the gPodder Sync app endpoint + async fn upload_episode_actions_to_nextcloud(&self, nextcloud_url: &str, username: &str, password: &str, episode_actions: &[serde_json::Value]) -> AppResult<()> { + let client = reqwest::Client::new(); + // Nextcloud gPodder Sync 
app uses the episode_action endpoint + let upload_url = format!("{}/index.php/apps/gpoddersync/episode_action/create", nextcloud_url.trim_end_matches('/')); + + let response = client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(episode_actions) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to upload episode actions to Nextcloud: {}", e)))?; + + if response.status().is_success() { + tracing::info!("Successfully uploaded {} episode actions to Nextcloud", episode_actions.len()); + Ok(()) + } else { + tracing::warn!("Failed to upload episode actions to Nextcloud: {}", response.status()); + Ok(()) // Don't fail the whole sync if upload fails + } + } + + // Upload subscriptions with session support and error handling + async fn upload_subscriptions_to_gpodder_with_session(&self, session: &GpodderSession, gpodder_url: &str, username: &str, password: &str, device_name: &str, subscriptions: &[String]) -> AppResult<()> { + let upload_url = format!("{}/api/2/subscriptions/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); + + // Format subscription changes according to GPodder API spec + let subscription_changes = serde_json::json!({ + "add": subscriptions, + "remove": [] + }); + + let response = if session.authenticated { + session.client + .post(&upload_url) + .json(&subscription_changes) + .send() + .await + } else { + session.client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(&subscription_changes) + .send() + .await + }; + + match response { + Ok(resp) if resp.status().is_success() => { + tracing::info!("Successfully uploaded {} subscriptions to gPodder service", subscriptions.len()); + Ok(()) + } + Ok(resp) => { + tracing::warn!("Failed to upload subscriptions: {}", resp.status()); + // Don't fail sync for upload failures - log and continue + Ok(()) + } + Err(e) => { + tracing::warn!("Error uploading subscriptions: {}", e); + // Don't fail sync for upload failures - log and 
continue + Ok(()) + } + } + } + + // Process gPodder subscriptions and add missing podcasts + async fn process_gpodder_subscriptions(&self, user_id: i32, subscriptions: &[String]) -> AppResult<()> { + let mut added = 0; + let mut skipped = 0; + let mut failed = 0; + + for feed_url in subscriptions { + tracing::info!("Processing podcast {} for user {}", feed_url, user_id); + + // Use the existing add_podcast_from_url function which handles duplicates and fetching + match self.add_podcast_from_url(user_id, feed_url, None).await { + Ok(_) => { + added += 1; + tracing::info!("Successfully added podcast: {}", feed_url); + } + Err(e) => { + // Check if it failed because it already exists + if self.podcast_exists_for_user(user_id, feed_url).await.unwrap_or(false) { + skipped += 1; + tracing::debug!("Podcast {} already exists for user {}", feed_url, user_id); + } else { + failed += 1; + tracing::warn!("Failed to add podcast {}: {}", feed_url, e); + } + } } } + + tracing::info!("GPodder subscription sync completed: {} added, {} skipped, {} failed", added, skipped, failed); Ok(()) } - // Get gPodder status - matches Python get_user_gpodder_status function exactly - pub async fn gpodder_get_status(&self, user_id: i32) -> AppResult { - match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT pod_sync_type, gpodderurl, gpodderloginname FROM "Users" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - let sync_type: Option = row.try_get("pod_sync_type")?; - let gpodder_url: Option = row.try_get("gpodderurl")?; - let gpodder_login: Option = row.try_get("gpodderloginname")?; - - let sync_type = sync_type.unwrap_or_else(|| "None".to_string()); - - Ok(GpodderStatus { - sync_type: sync_type.clone(), - gpodder_url, - gpodder_login, - }) - } else { - Ok(GpodderStatus { - sync_type: "None".to_string(), - gpodder_url: None, - gpodder_login: None, - }) + // Process subscription removals from gPodder service + 
async fn process_gpodder_subscription_removals(&self, user_id: i32, removals: &[String]) -> AppResult<()> { + let mut removed = 0; + let mut not_found = 0; + let mut failed = 0; + + for feed_url in removals { + tracing::info!("Processing podcast removal {} for user {}", feed_url, user_id); + + // Check if the podcast exists locally first + if self.podcast_exists_for_user(user_id, feed_url).await.unwrap_or(false) { + // Remove the podcast using existing function + match self.remove_podcast_by_url(user_id, feed_url).await { + Ok(_) => { + removed += 1; + tracing::info!("Successfully removed podcast: {}", feed_url); + } + Err(e) => { + failed += 1; + tracing::warn!("Failed to remove podcast {}: {}", feed_url, e); + } } + } else { + not_found += 1; + tracing::debug!("Podcast {} not found locally for user {} (already removed)", feed_url, user_id); } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT Pod_Sync_Type, GpodderUrl, GpodderLoginName FROM Users WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; + } + + tracing::info!("GPodder subscription removal completed: {} removed, {} not found, {} failed", removed, not_found, failed); + Ok(()) + } + + // Detect and remove orphaned local podcasts that are not in the remote subscription list + async fn sync_local_podcast_removals(&self, user_id: i32, remote_subscriptions: &[String]) -> AppResult> { + let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; + let remote_set: std::collections::HashSet = remote_subscriptions.iter().cloned().collect(); + + let mut removed_podcasts = Vec::new(); + + // Find podcasts that exist locally but not in remote subscriptions + for local_feed in &local_subscriptions { + if !remote_set.contains(local_feed) { + tracing::info!("Local podcast {} not found in remote subscriptions, removing", local_feed); - if let Some(row) = row { - let sync_type: Option = row.try_get("Pod_Sync_Type")?; - let gpodder_url: Option = row.try_get("GpodderUrl")?; - let 
gpodder_login: Option = row.try_get("GpodderLoginName")?; - - let sync_type = sync_type.unwrap_or_else(|| "None".to_string()); - - Ok(GpodderStatus { - sync_type: sync_type.clone(), - gpodder_url, - gpodder_login, - }) - } else { - Ok(GpodderStatus { - sync_type: "None".to_string(), - gpodder_url: None, - gpodder_login: None, - }) + match self.remove_podcast_by_url(user_id, local_feed).await { + Ok(_) => { + removed_podcasts.push(local_feed.clone()); + tracing::info!("Successfully removed orphaned local podcast: {}", local_feed); + } + Err(e) => { + tracing::warn!("Failed to remove orphaned local podcast {}: {}", local_feed, e); + } } } } + + if !removed_podcasts.is_empty() { + tracing::info!("Removed {} orphaned local podcasts", removed_podcasts.len()); + } + + Ok(removed_podcasts) } - // Toggle gPodder sync - matches Python toggle_gpodder function exactly - pub async fn gpodder_toggle_sync(&self, user_id: i32) -> AppResult { - let current_status = self.gpodder_get_status(user_id).await?; - let current_enabled = current_status.sync_type != "None" && !current_status.sync_type.is_empty(); - let new_enabled = !current_enabled; + // Upload local subscriptions to gPodder service - matches GPodder API spec POST /api/2/subscriptions/{username}/{device}.json + async fn upload_subscriptions_to_gpodder(&self, gpodder_url: &str, username: &str, password: &str, device_name: &str, subscriptions: &[String]) -> AppResult<()> { + let upload_url = format!("{}/api/2/subscriptions/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); - let new_sync_type = if new_enabled { - // Restore previous sync type or default to "external" - if !current_status.sync_type.is_empty() && current_status.sync_type != "None" { - current_status.sync_type + // Format subscription changes according to GPodder API spec + let subscription_changes = serde_json::json!({ + "add": subscriptions, + "remove": [] + }); + + // Use correct authentication based on internal vs external + let 
response = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + client.post(&upload_url) + .header("X-GPodder-Token", password) + .json(&subscription_changes) + .send() + .await + } else { + // External GPodder API - use session auth with basic fallback + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client + .post(&upload_url) + .json(&subscription_changes) + .send() + .await } else { - "external".to_string() + session.client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(&subscription_changes) + .send() + .await } - } else { - "None".to_string() }; - match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "Users" SET pod_sync_type = $1 WHERE userid = $2"#) - .bind(&new_sync_type) - .bind(user_id) - .execute(pool) - .await?; + match response { + Ok(resp) if resp.status().is_success() => { + tracing::info!("Successfully uploaded {} subscriptions to gPodder service", subscriptions.len()); + Ok(()) } - DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE Users SET Pod_Sync_Type = ? 
WHERE UserID = ?") - .bind(&new_sync_type) - .bind(user_id) - .execute(pool) - .await?; + Ok(resp) => { + tracing::warn!("Failed to upload subscriptions to gPodder service: {}", resp.status()); + Ok(()) // Don't fail sync for upload failures + } + Err(e) => { + tracing::warn!("Failed to upload subscriptions: {}", e); + Ok(()) // Don't fail sync for upload failures } } - - Ok(new_enabled) } - // Helper function to get user sync settings - async fn get_user_sync_settings(&self, user_id: i32) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT gpodderurl, gpodderloginname, gpoddertoken, pod_sync_type FROM "Users" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - let url: Option = row.try_get("gpodderurl")?; - let username: Option = row.try_get("gpodderloginname")?; - let token: Option = row.try_get("gpoddertoken")?; - let sync_type: Option = row.try_get("pod_sync_type")?; - - if url.is_some() && username.is_some() && token.is_some() { - Ok(Some(UserSyncSettings { - url: url.unwrap(), - username: username.unwrap(), - token: token.unwrap(), - sync_type: sync_type.unwrap_or_default() - })) - } else { - Ok(None) - } + // Sync episode actions with gPodder service - matches Python episode actions sync with timestamp support + async fn sync_episode_actions_with_gpodder(&self, gpodder_url: &str, username: &str, password: &str, device_name: &str, user_id: i32) -> AppResult<()> { + println!("\n========== STARTING EPISODE ACTIONS SYNC =========="); + + // Get last sync timestamp for incremental sync (BETTER than Python - follows GPodder spec) + let since_timestamp = self.get_last_sync_timestamp(user_id).await?; + + println!("📅 Last sync timestamp from DB: {:?}", since_timestamp); + + // Get local episode actions since last sync for efficient incremental sync + let local_actions = if let Some(since) = since_timestamp { + self.get_user_episode_actions_since(user_id, since).await? 
+ } else { + self.get_user_episode_actions(user_id).await? + }; + + // Upload local actions to gPodder service - matches Python POST /api/2/episodes/{username}.json + if !local_actions.is_empty() { + let upload_url = format!("{}/api/2/episodes/{}.json", gpodder_url.trim_end_matches('/'), username); + + // Use correct authentication based on internal vs external + let response = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + client.post(&upload_url) + .header("X-GPodder-Token", password) + .json(&local_actions) + .send() + .await + } else { + // External GPodder API - use session auth with basic fallback + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + // Use session-based authentication + session.client + .post(&upload_url) + .json(&local_actions) + .send() + .await } else { - Ok(None) + // Fallback to basic auth + session.client + .post(&upload_url) + .basic_auth(username, Some(password)) + .json(&local_actions) + .send() + .await + } + }; + + match response { + Ok(resp) if resp.status().is_success() => { + tracing::info!("Successfully uploaded {} episode actions", local_actions.len()); + } + Ok(resp) => { + tracing::warn!("Failed to upload episode actions: {}", resp.status()); + } + Err(e) => { + return Err(AppError::internal(&format!("Failed to upload episode actions: {}", e))); } } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT GpodderUrl, GpodderLoginName, GpodderToken, Pod_Sync_Type FROM Users WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - let url: Option = row.try_get("GpodderUrl")?; - let username: Option = row.try_get("GpodderLoginName")?; - let token: Option = row.try_get("GpodderToken")?; - let sync_type: Option = row.try_get("Pod_Sync_Type")?; - - if url.is_some() && username.is_some() && token.is_some() { - 
Ok(Some(UserSyncSettings { - url: url.unwrap(), - username: username.unwrap(), - token: token.unwrap(), - sync_type: sync_type.unwrap_or_default() - })) + } + + // Download remote actions from gPodder service with pagination support + // The server limits responses to 25k actions, so we need to loop until we get all of them + let mut all_remote_actions = Vec::new(); + const MAX_ACTIONS_PER_BATCH: usize = 25000; + + let initial_since = if let Some(since) = since_timestamp { + since.timestamp() + } else { + 0 + }; + + println!("🔍 GPodder episode actions sync starting with since={} ({})", + initial_since, + if initial_since == 0 { "FULL SYNC" } else { "INCREMENTAL SYNC" }); + println!(" Fetching from ALL devices (no device filter to include NULL device actions)"); + + let mut current_since = initial_since; + + loop { + // DON'T filter by device - get actions from ALL devices including NULL device actions + // The 'since' parameter ensures we only get NEW actions (efficient incremental sync) + let download_url = format!("{}/api/2/episodes/{}.json?since={}", + gpodder_url.trim_end_matches('/'), username, current_since); + + println!("📥 Fetching episode actions from: {}", download_url); + + // Use correct authentication based on internal vs external for download + let response = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + client.get(&download_url) + .header("X-GPodder-Token", password) + .send() + .await + } else { + // External GPodder API - use session auth with basic fallback + let session = self.create_gpodder_session_with_password(gpodder_url, username, password).await?; + if session.authenticated { + session.client + .get(&download_url) + .send() + .await + } else { + session.client + .get(&download_url) + .basic_auth(username, Some(password)) + .send() + .await + } + }; + + match response { + Ok(resp) if resp.status().is_success() => { + let episode_data: 
serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse episode actions response: {}", e)))?; + + let actions = episode_data.get("actions").and_then(|v| v.as_array()).cloned().unwrap_or_default(); + let batch_size = actions.len(); + let new_timestamp = episode_data.get("timestamp").and_then(|v| v.as_i64()).unwrap_or(current_since); + + // Check if changelog-news-168 is in this batch + let has_changelog_168 = actions.iter().any(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + .map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }); + + println!("📦 Fetched {} episode actions (since={}, response_timestamp={}) {}", + batch_size, current_since, new_timestamp, + if has_changelog_168 { "🎯 CONTAINS changelog-news-168" } else { "" }); + + // Add actions from this batch + for action in actions { + all_remote_actions.push(action); + } + + // If we got less than MAX_ACTIONS_PER_BATCH, we've reached the end + if batch_size < MAX_ACTIONS_PER_BATCH { + tracing::info!("Reached end of episode actions (got {} < {} limit)", batch_size, MAX_ACTIONS_PER_BATCH); + break; + } + + // Update since to the timestamp from the response for next iteration + if new_timestamp > current_since { + current_since = new_timestamp; + tracing::info!("Updated since timestamp to {} for next batch", current_since); } else { - Ok(None) + tracing::warn!("Timestamp didn't advance, stopping pagination to avoid infinite loop"); + break; } - } else { - Ok(None) + } + Ok(resp) => { + tracing::warn!("Failed to download episode actions: {}", resp.status()); + break; + } + Err(e) => { + return Err(AppError::internal(&format!("Failed to download episode actions: {}", e))); } } } + + println!("✅ Downloaded {} total remote episode actions across all batches", all_remote_actions.len()); + + // Check if changelog-news-168 is in the aggregated actions + let has_changelog_168 = all_remote_actions.iter().any(|a| { + a.get("episode") + .and_then(|e| e.as_str()) + 
.map(|s| s.contains("changelog-news-168")) + .unwrap_or(false) + }); + + if has_changelog_168 { + println!("🎯 FOUND changelog-news-168 in aggregated remote actions before processing!"); + } else { + println!("❌ changelog-news-168 NOT FOUND in aggregated remote actions!"); + println!(" This means it was never returned by the GPodder API across all batches."); + } + + // Apply all remote actions locally + if !all_remote_actions.is_empty() { + self.apply_remote_episode_actions(user_id, &all_remote_actions).await?; + } + + // Update last sync timestamp for incremental sync (BETTER than Python) + self.update_last_sync_timestamp(user_id).await?; + + Ok(()) } - // Check if podcast exists for user - async fn podcast_exists_for_user(&self, user_id: i32, feed_url: &str) -> AppResult { + // Get user podcast feeds for sync + async fn get_user_podcast_feeds(&self, user_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) - .bind(feed_url) + let rows = sqlx::query(r#"SELECT feedurl FROM "Podcasts" WHERE userid = $1 AND (username IS NULL OR username = '') AND (password IS NULL OR password = '')"#) .bind(user_id) - .fetch_optional(pool) + .fetch_all(pool) .await?; - Ok(row.is_some()) + let mut feeds = Vec::new(); + for row in rows { + feeds.push(row.try_get::("feedurl")?); + } + Ok(feeds) } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?") - .bind(feed_url) + let rows = sqlx::query("SELECT FeedURL FROM Podcasts WHERE UserID = ? 
AND (Username IS NULL OR Username = '') AND (Password IS NULL OR Password = '')") .bind(user_id) - .fetch_optional(pool) + .fetch_all(pool) .await?; - Ok(row.is_some()) + let mut feeds = Vec::new(); + for row in rows { + feeds.push(row.try_get::("FeedURL")?); + } + Ok(feeds) } } } - // Get or create default device - matches Python device handling - async fn get_or_create_default_device(&self, user_id: i32) -> AppResult { - let devices = self.gpodder_get_user_devices(user_id).await?; - - for device in &devices { - if device["is_default"].as_bool().unwrap_or(false) { - return Ok(device["name"].as_str().unwrap_or("pinepods").to_string()); + // Apply remote episode actions locally - matches Python apply_episode_actions function exactly + async fn apply_remote_episode_actions(&self, user_id: i32, actions: &[serde_json::Value]) -> AppResult<()> { + let total_actions = actions.len(); + tracing::info!("Processing {} episode actions for user {}", total_actions, user_id); + + let mut applied_count = 0; + let mut not_found_count = 0; + let mut not_found_urls: Vec = Vec::new(); + let mut changelog_168_found = false; + let mut changelog_168_result = String::new(); + + // Process in batches with progress logging + const BATCH_SIZE: usize = 1000; + const LOG_INTERVAL: usize = 500; // Log progress every 500 actions + + // DEBUG: Log the first action to see its structure + if !actions.is_empty() { + tracing::info!("DEBUG: First episode action structure: {}", serde_json::to_string_pretty(&actions[0]).unwrap_or_else(|_| "failed to serialize".to_string())); + } + + for (index, action) in actions.iter().enumerate() { + // Log progress periodically instead of for every action + if index > 0 && index % LOG_INTERVAL == 0 { + tracing::info!("Progress: {}/{} actions processed ({} applied, {} not found)", + index, total_actions, applied_count, not_found_count); + } + if let (Some(episode_url), Some(action_type)) = ( + action["episode"].as_str(), + action["action"].as_str() + ) { + match 
action_type { + "play" => { + // Handle both integer Unix timestamps and string timestamps + let timestamp_opt = if let Some(timestamp_int) = action["timestamp"].as_i64() { + // Unix timestamp as integer (gpodder standard format) + Some(chrono::NaiveDateTime::from_timestamp_opt(timestamp_int, 0).unwrap_or_else(|| chrono::Utc::now().naive_utc())) + } else if let Some(timestamp_str) = action["timestamp"].as_str() { + // String timestamp (alternative format) + if let Ok(parsed) = chrono::DateTime::parse_from_rfc3339(timestamp_str) { + Some(parsed.naive_utc()) + } else if let Ok(parsed) = chrono::NaiveDateTime::parse_from_str(timestamp_str, "%Y-%m-%dT%H:%M:%S") { + Some(parsed) + } else { + tracing::warn!("Failed to parse timestamp string for episode action: {}", timestamp_str); + None + } + } else { + None + }; + + if let (Some(position), Some(timestamp)) = ( + action["position"].as_i64(), + timestamp_opt + ) { + // Find local episode by URL + if let Some(episode_id) = self.find_episode_by_url(user_id, episode_url).await? 
{ + + // Get episode duration to check if it should be marked as complete + if let Ok(episode_duration) = self.get_episode_duration(episode_id).await { + let position_sec = position as i32; + let remaining_time = episode_duration - position_sec; + + // Mark complete if position is at/beyond the end OR within 60 seconds of completion + if episode_duration > 0 && (position_sec >= episode_duration || remaining_time <= 60) { + // At end or within 1 minute of completion - mark as complete + // GPodder sync only handles regular podcast episodes, never YouTube videos + match self.mark_episode_completed(episode_id, user_id, false).await { + Ok(_) => { + applied_count += 1; + // Only log changelog-news-168 for debugging + if episode_url.contains("changelog-news-168") { + changelog_168_found = true; + changelog_168_result = format!("✅ Marked as completed: {}s/{}s", position_sec, episode_duration); + tracing::info!("✓ Marked changelog-news-168 as completed via GPodder sync"); + } + } + Err(e) => { + if episode_url.contains("changelog-news-168") { + changelog_168_found = true; + changelog_168_result = format!("❌ FAILED to mark complete: {}", e); + } + tracing::debug!("Failed to mark episode as completed: {}", e); + } + } + } else { + // Update progress normally + match self.update_episode_progress(user_id, episode_id, position_sec, timestamp).await { + Ok(_) => { + applied_count += 1; + if episode_url.contains("changelog-news-168") { + changelog_168_found = true; + changelog_168_result = format!("✅ Updated progress: {}s/{}s", position_sec, episode_duration); + tracing::info!("✓ Updated changelog-news-168 progress to {}s/{}s", position_sec, episode_duration); + } + } + Err(e) => { + if episode_url.contains("changelog-news-168") { + changelog_168_found = true; + changelog_168_result = format!("❌ FAILED: {}", e); + } + tracing::debug!("Failed to update episode progress: {}", e); + } + } + } + } else { + // Fallback to normal progress update if duration unavailable + match 
self.update_episode_progress(user_id, episode_id, position as i32, timestamp).await { + Ok(_) => { + applied_count += 1; + } + Err(e) => { + tracing::debug!("Failed to update episode progress (no duration): {}", e); + } + } + } + } else { + not_found_count += 1; + not_found_urls.push(episode_url.to_string()); + if episode_url.contains("changelog-news-168") { + changelog_168_found = true; + changelog_168_result = "❌ NOT FOUND in database".to_string(); + } + } + } + } + "download" => { + // Handle download actions if needed (currently not implemented) + tracing::debug!("Download action for episode: {}", episode_url); + } + "delete" => { + // Handle delete actions if needed (currently not implemented) + tracing::debug!("Delete action for episode: {}", episode_url); + } + _ => { + tracing::debug!("Unknown action type: {}", action_type); + } + } } } - // Create default device if none exists - matches Python device creation logic - let device_name = format!("pinepods-internal-{}", user_id); - let device_type = "server"; - self.create_gpodder_device(user_id, &device_name, device_type, true).await?; - Ok(device_name) - } + tracing::info!("✅ Episode actions processing complete: {}/{} applied, {} not found in local database", + applied_count, total_actions, not_found_count); - // Create gPodder device - matches Python device creation - async fn create_gpodder_device(&self, user_id: i32, device_name: &str, device_type: &str, is_default: bool) -> AppResult<()> { - match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#"INSERT INTO "GpodderDevices" (userid, devicename, devicetype, isdefault) VALUES ($1, $2, $3, $4)"#) - .bind(user_id) - .bind(device_name) - .bind(device_type) - .bind(is_default) - .execute(pool) - .await?; - } - DatabasePool::MySQL(pool) => { - sqlx::query("INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, IsDefault) VALUES (?, ?, ?, ?)") - .bind(user_id) - .bind(device_name) - .bind(device_type) - .bind(is_default) - .execute(pool) - 
.await?; + // Print changelog-news-168 result + println!("\n========== CHANGELOG-NEWS-168 DEBUG =========="); + if changelog_168_found { + println!("🎯 changelog-news-168 WAS PROCESSED: {}", changelog_168_result); + } else { + println!("⚠️ changelog-news-168 was NOT found in episode actions"); + } + println!("==============================================\n"); + + // Print sample of not found URLs for debugging + if !not_found_urls.is_empty() { + println!("\n========== EPISODE ACTIONS NOT FOUND (first 20) =========="); + for url in not_found_urls.iter().take(20) { + println!("❌ NOT FOUND: {}", url); } + println!("========== END NOT FOUND EPISODES (total: {}) ==========\n", not_found_urls.len()); } + Ok(()) } - // Create gPodder device with caption - matches Python create_device with all fields - pub async fn gpodder_create_device_with_caption(&self, user_id: i32, device_name: &str, device_type: &str, device_caption: Option<&str>, is_default: bool) -> AppResult { + // Find episode ID by URL for user + async fn find_episode_by_url(&self, user_id: i32, episode_url: &str) -> AppResult> { match self { DatabasePool::Postgres(pool) => { let row = sqlx::query(r#" - INSERT INTO "GpodderDevices" (userid, devicename, devicetype, devicecaption, isdefault) - VALUES ($1, $2, $3, $4, $5) - RETURNING deviceid"#) + SELECT e.episodeid + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + WHERE e.episodeurl = $1 AND p.userid = $2 + "#) + .bind(episode_url) .bind(user_id) - .bind(device_name) - .bind(device_type) - .bind(device_caption) - .bind(is_default) - .fetch_one(pool) + .fetch_optional(pool) .await?; - Ok(row.try_get("deviceid")?) 
+ if let Some(row) = row { + Ok(Some(row.try_get("episodeid")?)) + } else { + Ok(None) + } } DatabasePool::MySQL(pool) => { - let result = sqlx::query("INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault) VALUES (?, ?, ?, ?, ?)") + let row = sqlx::query(" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + WHERE e.EpisodeURL = ? AND p.UserID = ? + ") + .bind(episode_url) .bind(user_id) - .bind(device_name) - .bind(device_type) - .bind(device_caption) - .bind(is_default) - .execute(pool) + .fetch_optional(pool) .await?; - Ok(result.last_insert_id() as i32) + if let Some(row) = row { + Ok(Some(row.try_get("EpisodeID")?)) + } else { + Ok(None) + } } } } - // Get default gPodder device - matches Python get_default_device function exactly - pub async fn gpodder_get_default_device(&self, user_id: i32) -> AppResult> { - match self { + // Update episode progress from remote sync + async fn update_episode_progress(&self, user_id: i32, episode_id: i32, position: i32, timestamp: chrono::NaiveDateTime) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + // Insert or update episode history + sqlx::query(r#" + INSERT INTO "UserEpisodeHistory" (userid, episodeid, listenduration, listendate) + VALUES ($1, $2, $3, $4) + ON CONFLICT (userid, episodeid) + DO UPDATE SET listenduration = GREATEST("UserEpisodeHistory".listenduration, $3), listendate = $4 + "#) + .bind(user_id) + .bind(episode_id) + .bind(position) + .bind(timestamp) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + // Insert or update episode history + sqlx::query(" + INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDuration, ListenDate) + VALUES (?, ?, ?, ?) 
+ ON DUPLICATE KEY UPDATE + ListenDuration = GREATEST(ListenDuration, VALUES(ListenDuration)), + ListenDate = VALUES(ListenDate) + ") + .bind(user_id) + .bind(episode_id) + .bind(position) + .bind(timestamp) + .execute(pool) + .await?; + } + } + Ok(()) + } + + // Get gPodder status - matches Python get_user_gpodder_status function exactly + pub async fn gpodder_get_status(&self, user_id: i32) -> AppResult { + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT deviceid, devicename, devicetype, devicecaption FROM "GpodderDevices" WHERE userid = $1 AND isdefault = true LIMIT 1"#) + let row = sqlx::query(r#"SELECT pod_sync_type, gpodderurl, gpodderloginname FROM "Users" WHERE userid = $1"#) .bind(user_id) .fetch_optional(pool) .await?; if let Some(row) = row { - Ok(Some(serde_json::json!({ - "id": row.try_get::("deviceid")?, - "name": row.try_get::("devicename")?, - "type": row.try_get::("devicetype")?, - "caption": row.try_get::, _>("devicecaption")?, - "last_sync": None::>, - "is_active": true, - "is_remote": false, - "is_default": true - }))) + let sync_type: Option = row.try_get("pod_sync_type")?; + let gpodder_url: Option = row.try_get("gpodderurl")?; + let gpodder_login: Option = row.try_get("gpodderloginname")?; + + let sync_type = sync_type.unwrap_or_else(|| "None".to_string()); + + Ok(GpodderStatus { + sync_type: sync_type.clone(), + gpodder_url, + gpodder_login, + }) } else { - Ok(None) + Ok(GpodderStatus { + sync_type: "None".to_string(), + gpodder_url: None, + gpodder_login: None, + }) } } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT DeviceID, DeviceName, DeviceType, DeviceCaption FROM GpodderDevices WHERE UserID = ? 
AND IsDefault = 1 LIMIT 1") + let row = sqlx::query("SELECT Pod_Sync_Type, GpodderUrl, GpodderLoginName FROM Users WHERE UserID = ?") .bind(user_id) .fetch_optional(pool) .await?; if let Some(row) = row { - Ok(Some(serde_json::json!({ - "id": row.try_get::("DeviceID")?, - "name": row.try_get::("DeviceName")?, - "type": row.try_get::("DeviceType")?, - "caption": row.try_get::, _>("DeviceCaption")?, - "last_sync": None::>, - "is_active": true, - "is_remote": false, - "is_default": true - }))) + let sync_type: Option = row.try_get("Pod_Sync_Type")?; + let gpodder_url: Option = row.try_get("GpodderUrl")?; + let gpodder_login: Option = row.try_get("GpodderLoginName")?; + + let sync_type = sync_type.unwrap_or_else(|| "None".to_string()); + + Ok(GpodderStatus { + sync_type: sync_type.clone(), + gpodder_url, + gpodder_login, + }) } else { - Ok(None) + Ok(GpodderStatus { + sync_type: "None".to_string(), + gpodder_url: None, + gpodder_login: None, + }) } } } } - - // Sync with Nextcloud - matches Python refresh_nextcloud_subscription function exactly - async fn sync_with_nextcloud(&self, user_id: i32, settings: &UserSyncSettings, _force: bool) -> AppResult { - let client = reqwest::Client::new(); - let decrypted_password = self.decrypt_password(&settings.token).await?; - - // Get subscriptions from Nextcloud gPodder Sync app - let subscriptions_url = format!("{}/index.php/apps/gpoddersync/subscriptions", settings.url.trim_end_matches('/')); - - let response = client - .get(&subscriptions_url) - .basic_auth(&settings.username, Some(&decrypted_password)) - .send() - .await - .map_err(|e| AppError::internal(&format!("Failed to sync with Nextcloud: {}", e)))?; + // Toggle gPodder sync - matches Python toggle_gpodder function exactly + pub async fn gpodder_toggle_sync(&self, user_id: i32) -> AppResult { + let current_status = self.gpodder_get_status(user_id).await?; + let current_enabled = current_status.sync_type != "None" && !current_status.sync_type.is_empty(); + let 
new_enabled = !current_enabled; - if response.status().is_success() { - let subscriptions: serde_json::Value = response.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud subscriptions: {}", e)))?; - - // Process subscriptions and add missing podcasts - if let Some(feeds) = subscriptions.as_array() { - let feed_urls: Vec = feeds.iter() - .filter_map(|f| f.as_str().map(|s| s.to_string())) - .collect(); - self.process_gpodder_subscriptions(user_id, &feed_urls).await?; + let new_sync_type = if new_enabled { + // Restore previous sync type or default to "external" + if !current_status.sync_type.is_empty() && current_status.sync_type != "None" { + current_status.sync_type + } else { + "external".to_string() } - - Ok(true) } else { - Ok(false) - } - } - - // Decrypt password using Fernet - matches Python encryption - async fn decrypt_password(&self, encrypted_password: &str) -> AppResult { - use fernet::Fernet; - - let encryption_key = self.get_encryption_key().await?; - let fernet = Fernet::new(&encryption_key) - .ok_or_else(|| AppError::internal("Failed to create Fernet cipher"))?; + "None".to_string() + }; - let decrypted = fernet.decrypt(encrypted_password) - .map_err(|e| AppError::internal(&format!("Failed to decrypt password: {}", e)))?; + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET pod_sync_type = $1 WHERE userid = $2"#) + .bind(&new_sync_type) + .bind(user_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET Pod_Sync_Type = ? 
WHERE UserID = ?") + .bind(&new_sync_type) + .bind(user_id) + .execute(pool) + .await?; + } + } - String::from_utf8(decrypted) - .map_err(|e| AppError::internal(&format!("Invalid UTF-8 in decrypted password: {}", e))) + Ok(new_enabled) } - - // Get all users with podcasts - for admin refresh - pub async fn get_all_users_with_podcasts(&self) -> AppResult> { + + // Helper function to get user sync settings + pub async fn get_user_sync_settings(&self, user_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT DISTINCT userid FROM "Podcasts" ORDER BY userid"#) - .fetch_all(pool) + let row = sqlx::query(r#"SELECT gpodderurl, gpodderloginname, gpoddertoken, pod_sync_type FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) .await?; - let mut users = Vec::new(); - for row in rows { - users.push(row.try_get("userid")?); + if let Some(row) = row { + let url: Option = row.try_get("gpodderurl")?; + let username: Option = row.try_get("gpodderloginname")?; + let token: Option = row.try_get("gpoddertoken")?; + let sync_type: Option = row.try_get("pod_sync_type")?; + + if url.is_some() && username.is_some() && token.is_some() { + Ok(Some(UserSyncSettings { + url: url.unwrap(), + username: username.unwrap(), + token: token.unwrap(), + sync_type: sync_type.unwrap_or_default() + })) + } else { + Ok(None) + } + } else { + Ok(None) } - Ok(users) } DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT DISTINCT UserID FROM Podcasts ORDER BY UserID") - .fetch_all(pool) + let row = sqlx::query("SELECT GpodderUrl, GpodderLoginName, GpodderToken, Pod_Sync_Type FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) .await?; - let mut users = Vec::new(); - for row in rows { - users.push(row.try_get("UserID")?); + if let Some(row) = row { + let url: Option = row.try_get("GpodderUrl")?; + let username: Option = row.try_get("GpodderLoginName")?; + let token: Option = row.try_get("GpodderToken")?; 
+ let sync_type: Option = row.try_get("Pod_Sync_Type")?; + + if url.is_some() && username.is_some() && token.is_some() { + Ok(Some(UserSyncSettings { + url: url.unwrap(), + username: username.unwrap(), + token: token.unwrap(), + sync_type: sync_type.unwrap_or_default() + })) + } else { + Ok(None) + } + } else { + Ok(None) } - Ok(users) } } } - - // Get all users with gPodder sync enabled - for admin gPodder sync - pub async fn get_all_users_with_gpodder_sync(&self) -> AppResult> { + + // Check if podcast exists for user + async fn podcast_exists_for_user(&self, user_id: i32, feed_url: &str) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT userid FROM "Users" - WHERE pod_sync_type IS NOT NULL - AND pod_sync_type != 'None' - AND gpodderurl IS NOT NULL - AND gpodderloginname IS NOT NULL - AND gpoddertoken IS NOT NULL - ORDER BY userid - "#) - .fetch_all(pool) + let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) .await?; - let mut users = Vec::new(); - for row in rows { - users.push(row.try_get("userid")?); - } - Ok(users) + Ok(row.is_some()) } DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT UserID FROM Users - WHERE Pod_Sync_Type IS NOT NULL - AND Pod_Sync_Type != 'None' - AND GpodderUrl IS NOT NULL - AND GpodderLoginName IS NOT NULL - AND GpodderToken IS NOT NULL - ORDER BY UserID - ") - .fetch_all(pool) + let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? 
AND UserID = ?") + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) .await?; - let mut users = Vec::new(); - for row in rows { - users.push(row.try_get("UserID")?); - } - Ok(users) + Ok(row.is_some()) } } } - - // Get podcast values from RSS feed - matches Python get_podcast_values function exactly - pub async fn get_podcast_values(&self, feed_url: &str, user_id: i32, username: Option<&str>, password: Option<&str>) -> AppResult> { - use reqwest::header::AUTHORIZATION; - use feed_rs::parser; + + // Get or create default device - matches Python device handling + pub async fn get_or_create_default_device(&self, user_id: i32) -> AppResult { + // Get the default device name from Users table - this is where PinePods tracks user preferences + let default_device_name = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT defaultgpodderdevice FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("defaultgpodderdevice").ok().flatten()) + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT DefaultGpodderDevice FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("DefaultGpodderDevice").ok().flatten()) + } + }; - println!("Fetching podcast values from feed URL: {}", feed_url); + // If we have a default device name from Users table, use it + if let Some(device_name) = default_device_name { + return Ok(device_name); + } - // Build HTTP client with optional authentication - let client = reqwest::Client::new(); - let mut request = client.get(feed_url); + // Fallback: check sync settings to determine appropriate default + if let Some(sync_settings) = self.get_user_sync_settings(user_id).await? 
{ + match sync_settings.sync_type.as_str() { + "external" => { + // For external servers, we should not create devices - they must exist on the external server + return Err(AppError::BadRequest("No default device configured for external GPodder sync. Please configure a default device.".to_string())); + } + "gpodder" | "both" => { + // For internal sync, create a default internal device + let device_name = format!("pinepods-internal-{}", user_id); + let device_type = "server"; + self.create_gpodder_device(user_id, &device_name, device_type, true).await?; + + // Set this as the default in Users table + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET defaultgpodderdevice = $1 WHERE userid = $2"#) + .bind(&device_name) + .bind(user_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET DefaultGpodderDevice = ? WHERE UserID = ?") + .bind(&device_name) + .bind(user_id) + .execute(pool) + .await?; + } + } + + return Ok(device_name); + } + _ => { + return Err(AppError::BadRequest("GPodder sync not properly configured".to_string())); + } + } + } - if let (Some(user), Some(pass)) = (username, password) { - use base64::Engine; - let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass)); - request = request.header(AUTHORIZATION, format!("Basic {}", encoded)); + Err(AppError::BadRequest("No GPodder sync configured".to_string())) + } + + // Create gPodder device - matches Python device creation + async fn create_gpodder_device(&self, user_id: i32, device_name: &str, device_type: &str, is_default: bool) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + // Use INSERT ... 
ON CONFLICT to handle existing devices + sqlx::query(r#" + INSERT INTO "GpodderDevices" (userid, devicename, devicetype, isdefault) + VALUES ($1, $2, $3, $4) + ON CONFLICT (userid, devicename) + DO UPDATE SET devicetype = EXCLUDED.devicetype, isdefault = EXCLUDED.isdefault + "#) + .bind(user_id) + .bind(device_name) + .bind(device_type) + .bind(is_default) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + // Use INSERT ... ON DUPLICATE KEY UPDATE to handle existing devices + sqlx::query(" + INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, IsDefault) + VALUES (?, ?, ?, ?) + ON DUPLICATE KEY UPDATE DeviceType = VALUES(DeviceType), IsDefault = VALUES(IsDefault) + ") + .bind(user_id) + .bind(device_name) + .bind(device_type) + .bind(is_default) + .execute(pool) + .await?; + } + } + Ok(()) + } + + // Create gPodder device with caption - matches Python create_device with all fields + pub async fn gpodder_create_device_with_caption(&self, user_id: i32, device_name: &str, device_type: &str, device_caption: Option<&str>, is_default: bool) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + INSERT INTO "GpodderDevices" (userid, devicename, devicetype, devicecaption, isdefault) + VALUES ($1, $2, $3, $4, $5) + RETURNING deviceid"#) + .bind(user_id) + .bind(device_name) + .bind(device_type) + .bind(device_caption) + .bind(is_default) + .fetch_one(pool) + .await?; + + Ok(row.try_get("deviceid")?) 
+ } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault) VALUES (?, ?, ?, ?, ?)") + .bind(user_id) + .bind(device_name) + .bind(device_type) + .bind(device_caption) + .bind(is_default) + .execute(pool) + .await?; + + Ok(result.last_insert_id() as i32) + } } + } + + // Get default gPodder device - matches Python get_default_device function exactly + pub async fn gpodder_get_default_device(&self, user_id: i32) -> AppResult> { + // Get the default device name from Users table + let default_device_name = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT defaultgpodderdevice FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("defaultgpodderdevice").ok().flatten()) + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT DefaultGpodderDevice FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + row.and_then(|r| r.try_get::, _>("DefaultGpodderDevice").ok().flatten()) + } + }; - // Fetch RSS feed - let response = request.send().await?; - if !response.status().is_success() { - return Err(AppError::external_error(&format!("Failed to fetch RSS feed: {}", response.status()))); + if let Some(device_name) = default_device_name { + // Get all devices from GPodder API and find the one with matching name + let devices = self.gpodder_get_user_devices(user_id).await?; + + for device in devices { + if device.get("name").and_then(|v| v.as_str()) == Some(&device_name) { + // Mark this device as default and return it + let mut default_device = device; + default_device["is_default"] = serde_json::Value::Bool(true); + return Ok(Some(default_device)); + } + } } - let content = response.text().await?; + // If no default device found, return None + Ok(None) + } + + + // Sync with Nextcloud - matches Python refresh_nextcloud_subscription function exactly 
+ async fn sync_with_nextcloud(&self, user_id: i32, settings: &UserSyncSettings, _force: bool) -> AppResult { + let client = reqwest::Client::new(); + let decrypted_password = self.decrypt_password(&settings.token).await?; - // Parse RSS feed using feed-rs - let feed = parser::parse(content.as_bytes()) - .map_err(|e| AppError::external_error(&format!("Failed to parse RSS feed: {}", e)))?; + // Step 1: Get last sync timestamp for incremental sync + let since_timestamp = self.get_last_sync_timestamp(user_id).await?; - // Extract podcast metadata exactly as Python implementation - let mut podcast_values = std::collections::HashMap::new(); + // Step 2: Get subscriptions from Nextcloud gPodder Sync app with timestamp + let subscriptions_url = if let Some(since) = since_timestamp { + format!("{}/index.php/apps/gpoddersync/subscription_changes/{}?since={}", + settings.url.trim_end_matches('/'), since.timestamp(), since.timestamp()) + } else { + format!("{}/index.php/apps/gpoddersync/subscriptions", settings.url.trim_end_matches('/')) + }; - podcast_values.insert("feedurl".to_string(), feed_url.to_string()); - podcast_values.insert("userid".to_string(), user_id.to_string()); - podcast_values.insert("podcastname".to_string(), feed.title.as_ref().map(|t| t.content.clone()).unwrap_or_default()); - podcast_values.insert("description".to_string(), feed.description.as_ref().map(|d| d.content.clone()).unwrap_or_default()); - podcast_values.insert("author".to_string(), feed.authors.first().map(|a| a.name.clone()).unwrap_or_default()); - podcast_values.insert("websiteurl".to_string(), feed.links.first().map(|l| l.href.clone()).unwrap_or_default()); - podcast_values.insert("explicit".to_string(), "False".to_string()); // Default to False - podcast_values.insert("episodecount".to_string(), feed.entries.len().to_string()); + tracing::info!("Getting Nextcloud subscriptions from: {}", subscriptions_url); - // Extract artwork URL - check feed image and iTunes image - let artwork_url = 
feed.logo.as_ref().map(|l| l.uri.clone()) - .or_else(|| feed.icon.as_ref().map(|i| i.uri.clone())) - .unwrap_or_default(); - podcast_values.insert("artworkurl".to_string(), artwork_url); + let response = client + .get(&subscriptions_url) + .basic_auth(&settings.username, Some(&decrypted_password)) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to sync with Nextcloud: {}", e)))?; - // Extract categories - convert to dict format like Python - let categories = if !feed.categories.is_empty() { - let cat_dict: std::collections::HashMap = feed.categories - .iter() - .enumerate() - .map(|(i, cat)| (i.to_string(), cat.term.clone())) - .collect(); - serde_json::to_string(&cat_dict).unwrap_or_default() + let mut subscriptions_processed = false; + + if response.status().is_success() { + let subscriptions_response: serde_json::Value = response.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud subscriptions: {}", e)))?; + + // Handle both subscription change format and direct subscription list + let (subscriptions, removals) = if since_timestamp.is_some() { + // Incremental sync - expect {add: [], remove: [], timestamp: N} format + let adds = subscriptions_response["add"].as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|f| f.as_str().map(|s| s.to_string())) + .collect::>(); + + let removes = subscriptions_response["remove"].as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|f| f.as_str().map(|s| s.to_string())) + .collect::>(); + + (adds, removes) + } else { + // Full sync - expect direct array of subscription URLs, no removals in this format + let adds = subscriptions_response.as_array() + .unwrap_or(&vec![]) + .iter() + .filter_map(|f| f.as_str().map(|s| s.to_string())) + .collect::>(); + + (adds, Vec::new()) + }; + + tracing::info!("Downloaded {} subscriptions and {} removals from Nextcloud", subscriptions.len(), removals.len()); + + // Process subscriptions and add missing podcasts + if 
!subscriptions.is_empty() { + self.process_gpodder_subscriptions(user_id, &subscriptions).await?; + subscriptions_processed = true; + } + + // Process subscription removals + if !removals.is_empty() { + self.process_gpodder_subscription_removals(user_id, &removals).await?; + subscriptions_processed = true; // Mark as processed since we did work + } + } + + // Step 3: Get episode actions from Nextcloud with timestamp + let episode_actions_url = if let Some(since) = since_timestamp { + format!("{}/index.php/apps/gpoddersync/episode_action?since={}", + settings.url.trim_end_matches('/'), since.timestamp()) } else { - "{}".to_string() + format!("{}/index.php/apps/gpoddersync/episode_action", settings.url.trim_end_matches('/')) }; - podcast_values.insert("categories".to_string(), categories); - // Set default values for fields not in RSS - podcast_values.insert("podcastindexid".to_string(), "0".to_string()); - podcast_values.insert("episodeupdatecount".to_string(), "0".to_string()); + tracing::info!("Getting Nextcloud episode actions from: {}", episode_actions_url); - println!("Successfully extracted podcast values: {}", podcast_values.get("podcastname").unwrap_or(&"Unknown".to_string())); + let episode_response = client + .get(&episode_actions_url) + .basic_auth(&settings.username, Some(&decrypted_password)) + .send() + .await; - Ok(podcast_values) - } - - // Add podcast from RSS values - wrapper function for custom podcast addition - pub async fn add_podcast_from_values(&self, podcast_values: &std::collections::HashMap, user_id: i32, feed_cutoff: i32) -> AppResult<(i32, Option)> { - // Convert HashMap values to PodcastValues struct - let podcast_data = crate::handlers::podcasts::PodcastValues { - user_id, - pod_title: podcast_values.get("podcastname").unwrap_or(&"".to_string()).clone(), - pod_artwork: podcast_values.get("artworkurl").unwrap_or(&"".to_string()).clone(), - pod_author: podcast_values.get("author").unwrap_or(&"".to_string()).clone(), - categories: 
std::collections::HashMap::new(), // Parse from string if needed - pod_description: podcast_values.get("description").unwrap_or(&"".to_string()).clone(), - pod_episode_count: podcast_values.get("episodecount").unwrap_or(&"0".to_string()).parse().unwrap_or(0), - pod_feed_url: podcast_values.get("feedurl").unwrap_or(&"".to_string()).clone(), - pod_website: podcast_values.get("websiteurl").unwrap_or(&"".to_string()).clone(), - pod_explicit: podcast_values.get("explicit").unwrap_or(&"False".to_string()) == "True", - }; - - let podcast_index_id = podcast_values.get("podcastindexid") - .unwrap_or(&"0".to_string()) - .parse::() - .unwrap_or(0); - - self.add_podcast(&podcast_data, podcast_index_id, None, None).await - } - - // // Get podcast details - matches Python get_podcast_details function exactly - // pub async fn get_podcast_details(&self, user_id: i32, podcast_id: i32) -> AppResult> { - // println!("Getting podcast details for podcast {} and user {}", podcast_id, user_id); + let mut episode_actions_processed = false; - // let details = match self { - // DatabasePool::Postgres(pool) => { - // // Try to get podcast for user first, then fall back to UserID=1 - // let mut row = sqlx::query(r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) - // .bind(podcast_id) - // .bind(user_id) - // .fetch_optional(pool) - // .await?; - - // if row.is_none() { - // row = sqlx::query(r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = 1"#) - // .bind(podcast_id) - // .fetch_optional(pool) - // .await?; - // } + if let Ok(resp) = episode_response { + if resp.status().is_success() { + let episode_actions: serde_json::Value = resp.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse Nextcloud episode actions: {}", e)))?; - // if let Some(row) = row { - // // Get episode count from YouTubeVideos table if this is a YouTube channel - // let mut episode_count = row.try_get::("episodecount")?; - // let is_youtube_channel = 
row.try_get::("isyoutubechannel").unwrap_or(false); + if let Some(actions_array) = episode_actions["actions"].as_array() { + tracing::info!("Downloaded {} episode actions from Nextcloud", actions_array.len()); - // if is_youtube_channel { - // let count_result = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) - // .bind(podcast_id) - // .fetch_one(pool) - // .await?; - // episode_count = count_result.try_get::("count")? as i32; - // } - - // Some(serde_json::json!({ - // "podcastid": row.try_get::("podcastid")?, - // "podcastindexid": row.try_get::, _>("podcastindexid")?, - // "podcastname": row.try_get::("podcastname")?, - // "artworkurl": row.try_get::("artworkurl")?, - // "author": row.try_get::("author")?, - // "categories": row.try_get::, _>("categories")?, - // "description": row.try_get::("description")?, - // "episodecount": episode_count, - // "feedurl": row.try_get::("feedurl")?, - // "websiteurl": row.try_get::("websiteurl")?, - // "explicit": row.try_get::("explicit")?, - // "userid": row.try_get::("userid")?, - // "autodownload": row.try_get::("autodownload").unwrap_or(false), - // "startskip": row.try_get::("startskip").unwrap_or(0), - // "endskip": row.try_get::("endskip").unwrap_or(0), - // "username": row.try_get::, _>("username")?, - // "password": row.try_get::, _>("password")?, - // "isyoutubechannel": is_youtube_channel, - // "notificationsenabled": row.try_get::("notificationsenabled").unwrap_or(false), - // "feedcutoffdays": row.try_get::("feedcutoffdays").unwrap_or(0), - // })) - // } else { - // None - // } - // } - // DatabasePool::MySQL(pool) => { - // // Try to get podcast for user first, then fall back to UserID=1 - // let mut row = sqlx::query("SELECT * FROM Podcasts WHERE PodcastID = ? AND UserID = ?") - // .bind(podcast_id) - // .bind(user_id) - // .fetch_optional(pool) - // .await?; - - // if row.is_none() { - // row = sqlx::query("SELECT * FROM Podcasts WHERE PodcastID = ? 
AND UserID = 1") - // .bind(podcast_id) - // .fetch_optional(pool) - // .await?; - // } - - // if let Some(row) = row { - // // Get episode count from YouTubeVideos table if this is a YouTube channel - // let mut episode_count = row.try_get::("EpisodeCount")?; - // let is_youtube_channel = row.try_get::("IsYouTubeChannel").unwrap_or(0) != 0; + if !actions_array.is_empty() { + if let Err(e) = self.apply_remote_episode_actions(user_id, actions_array).await { + tracing::warn!("Nextcloud episode actions processing failed but continuing: {}", e); + } else { + episode_actions_processed = true; + } + } + } else if let Some(actions_array) = episode_actions.as_array() { + // Some Nextcloud implementations return direct array + tracing::info!("Downloaded {} episode actions from Nextcloud (direct)", actions_array.len()); - // if is_youtube_channel { - // let count_result = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") - // .bind(podcast_id) - // .fetch_one(pool) - // .await?; - // episode_count = count_result.try_get::("count")? 
as i32; - // } + if !actions_array.is_empty() { + if let Err(e) = self.apply_remote_episode_actions(user_id, actions_array).await { + tracing::warn!("Nextcloud episode actions processing failed but continuing: {}", e); + } else { + episode_actions_processed = true; + } + } + } + } else { + tracing::warn!("Nextcloud episode actions returned error: {}", resp.status()); + } + } else { + tracing::warn!("Failed to get episode actions from Nextcloud"); + } + + // Step 4: Upload local subscriptions to Nextcloud (if needed) + if since_timestamp.is_none() || subscriptions_processed { + let local_subscriptions = self.get_user_podcast_feeds(user_id).await?; + if let Err(e) = self.upload_subscriptions_to_nextcloud(&settings.url, &settings.username, &decrypted_password, &local_subscriptions).await { + tracing::warn!("Failed to upload subscriptions to Nextcloud: {}", e); + } else { + tracing::info!("Successfully uploaded {} subscriptions to Nextcloud", local_subscriptions.len()); + } + } else { + tracing::info!("Skipping subscription upload to Nextcloud - no changes detected"); + } + + // Step 5: Upload local episode actions to Nextcloud + let local_episode_actions = self.get_user_episode_actions(user_id).await?; + if !local_episode_actions.is_empty() { + if let Err(e) = self.upload_episode_actions_to_nextcloud(&settings.url, &settings.username, &decrypted_password, &local_episode_actions).await { + tracing::warn!("Failed to upload episode actions to Nextcloud: {}", e); + } else { + tracing::info!("Successfully uploaded {} episode actions to Nextcloud", local_episode_actions.len()); + } + } else { + tracing::info!("No local episode actions to upload to Nextcloud"); + } + + // Step 6: Update last sync timestamp for next incremental sync + self.update_last_sync_timestamp(user_id).await?; + + Ok(subscriptions_processed || episode_actions_processed) + } - // Some(serde_json::json!({ - // "podcastid": row.try_get::("PodcastID")?, - // "podcastindexid": row.try_get::, 
_>("PodcastIndexID")?, - // "podcastname": row.try_get::("PodcastName")?, - // "artworkurl": row.try_get::("ArtworkURL")?, - // "author": row.try_get::("Author")?, - // "categories": row.try_get::, _>("Categories")?, - // "description": row.try_get::("Description")?, - // "episodecount": episode_count, - // "feedurl": row.try_get::("FeedURL")?, - // "websiteurl": row.try_get::("WebsiteURL")?, - // "explicit": row.try_get::("Explicit").unwrap_or(0) != 0, - // "userid": row.try_get::("UserID")?, - // "autodownload": row.try_get::("AutoDownload").unwrap_or(0) != 0, - // "startskip": row.try_get::("StartSkip").unwrap_or(0), - // "endskip": row.try_get::("EndSkip").unwrap_or(0), - // "username": row.try_get::, _>("Username")?, - // "password": row.try_get::, _>("Password")?, - // "isyoutubechannel": is_youtube_channel, - // "notificationsenabled": row.try_get::("NotificationsEnabled").unwrap_or(0) != 0, - // "feedcutoffdays": row.try_get::("FeedCutoffDays").unwrap_or(0), - // })) - // } else { - // None - // } - // } - // }; + // Decrypt password using Fernet - matches Python encryption + pub async fn decrypt_password(&self, encrypted_password: &str) -> AppResult { + use fernet::Fernet; - // if let Some(ref result) = details { - // println!("Found podcast details for: {}", result["podcast_name"]); - // } else { - // println!("No podcast found with ID {} for user {}", podcast_id, user_id); - // } + // Get encryption key from app settings (base64 string) + let encryption_key = self.get_encryption_key().await?; + let fernet = Fernet::new(&encryption_key) + .ok_or_else(|| AppError::internal("Failed to create Fernet cipher"))?; - // Ok(details) - // } - - // Get notification settings - matches Python get_notification_settings function exactly - pub async fn get_notification_settings(&self, user_id: i32) -> AppResult> { - println!("Getting notification settings for user {}", user_id); + let decrypted = fernet.decrypt(encrypted_password) + .map_err(|_e| 
AppError::internal(&format!("Failed to decrypt password: Fernet decryption error")))?; - let settings = match self { + String::from_utf8(decrypted) + .map_err(|e| AppError::internal(&format!("Invalid UTF-8 in decrypted password: {}", e))) + } + + // Get all users with podcasts - for admin refresh + pub async fn get_all_users_with_podcasts(&self) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#"SELECT DISTINCT userid FROM "Podcasts" ORDER BY userid"#) + .fetch_all(pool) + .await?; + + let mut users = Vec::new(); + for row in rows { + users.push(row.try_get("userid")?); + } + Ok(users) + } + DatabasePool::MySQL(pool) => { + let rows = sqlx::query("SELECT DISTINCT UserID FROM Podcasts ORDER BY UserID") + .fetch_all(pool) + .await?; + + let mut users = Vec::new(); + for row in rows { + users.push(row.try_get("UserID")?); + } + Ok(users) + } + } + } + + // Get all users with gPodder sync enabled - for admin gPodder sync + pub async fn get_all_users_with_gpodder_sync(&self) -> AppResult> { + match self { DatabasePool::Postgres(pool) => { let rows = sqlx::query(r#" - SELECT platform, enabled, ntfytopic, ntfyserverurl, ntfyusername, ntfypassword, ntfyaccesstoken, gotifyurl, gotifytoken - FROM "UserNotificationSettings" - WHERE userid = $1 + SELECT userid FROM "Users" + WHERE pod_sync_type IS NOT NULL + AND pod_sync_type != 'None' + AND pod_sync_type != 'nextcloud' + AND gpodderurl IS NOT NULL + AND gpodderloginname IS NOT NULL + AND gpoddertoken IS NOT NULL + ORDER BY userid "#) - .bind(user_id) .fetch_all(pool) .await?; - let mut settings = Vec::new(); + let mut users = Vec::new(); for row in rows { - let setting = serde_json::json!({ - "platform": row.try_get::("platform")?, - "enabled": row.try_get::("enabled")?, - "ntfy_topic": row.try_get::, _>("ntfytopic")?, - "ntfy_server_url": row.try_get::, _>("ntfyserverurl")?, - "ntfy_username": row.try_get::, _>("ntfyusername")?, - "ntfy_password": row.try_get::, 
_>("ntfypassword")?, - "ntfy_access_token": row.try_get::, _>("ntfyaccesstoken")?, - "gotify_url": row.try_get::, _>("gotifyurl")?, - "gotify_token": row.try_get::, _>("gotifytoken")? - }); - settings.push(setting); + users.push(row.try_get("userid")?); } - settings + Ok(users) } DatabasePool::MySQL(pool) => { let rows = sqlx::query(" - SELECT Platform, Enabled, NtfyTopic, NtfyServerURL, NtfyUsername, NtfyPassword, NtfyAccessToken, GotifyURL, GotifyToken - FROM UserNotificationSettings - WHERE UserID = ? - ORDER BY Platform + SELECT UserID FROM Users + WHERE Pod_Sync_Type IS NOT NULL + AND Pod_Sync_Type != 'None' + AND Pod_Sync_Type != 'nextcloud' + AND GpodderUrl IS NOT NULL + AND GpodderLoginName IS NOT NULL + AND GpodderToken IS NOT NULL + ORDER BY UserID ") - .bind(user_id) .fetch_all(pool) .await?; - let mut settings = Vec::new(); + let mut users = Vec::new(); for row in rows { - let setting = serde_json::json!({ - "platform": row.try_get::("Platform")?, - "enabled": row.try_get::("Enabled")?, - "ntfy_topic": row.try_get::, _>("NtfyTopic")?, - "ntfy_server_url": row.try_get::, _>("NtfyServerURL")?, - "ntfy_username": row.try_get::, _>("NtfyUsername")?, - "ntfy_password": row.try_get::, _>("NtfyPassword")?, - "ntfy_access_token": row.try_get::, _>("NtfyAccessToken")?, - "gotify_url": row.try_get::, _>("GotifyURL")?, - "gotify_token": row.try_get::, _>("GotifyToken")? 
- }); - settings.push(setting); + users.push(row.try_get("UserID")?); } - settings + Ok(users) } - }; - - println!("Found {} notification settings for user {}", settings.len(), user_id); - Ok(settings) + } } - // Update notification settings - matches Python update_notification_settings function exactly - pub async fn update_notification_settings(&self, user_id: i32, platform: &str, enabled: bool, ntfy_topic: Option<&str>, ntfy_server_url: Option<&str>, ntfy_username: Option<&str>, ntfy_password: Option<&str>, ntfy_access_token: Option<&str>, gotify_url: Option<&str>, gotify_token: Option<&str>) -> AppResult { - println!("Updating notification settings for user {} platform {}", user_id, platform); - - // Check if settings exist for this user/platform combination and perform update/insert - let success = match self { + // Get stored authentication credentials for a feed URL + pub async fn get_feed_auth_credentials(&self, feed_url: &str, user_id: i32) -> AppResult<(Option, Option)> { + match self { DatabasePool::Postgres(pool) => { - let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserNotificationSettings" WHERE userid = $1 AND platform = $2"#) + let result = sqlx::query(r#"SELECT username, password FROM "Podcasts" WHERE feedurl = $1 AND userid = $2 LIMIT 1"#) + .bind(feed_url) .bind(user_id) - .bind(platform) - .fetch_one(pool) + .fetch_optional(pool) .await?; - let count: i64 = existing.try_get("count")?; + if let Some(row) = result { + let username: Option = row.try_get("username").ok().flatten(); + let password: Option = row.try_get("password").ok().flatten(); + Ok((username, password)) + } else { + Ok((None, None)) + } + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("SELECT Username, Password FROM Podcasts WHERE FeedURL = ? AND UserID = ? 
LIMIT 1") + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; - if count > 0 { - // Update existing record - let result = sqlx::query(r#" - UPDATE "UserNotificationSettings" - SET enabled = $3, ntfytopic = $4, ntfyserverurl = $5, ntfyusername = $6, ntfypassword = $7, ntfyaccesstoken = $8, gotifyurl = $9, gotifytoken = $10 - WHERE userid = $1 AND platform = $2 - "#) - .bind(user_id) - .bind(platform) - .bind(enabled) - .bind(ntfy_topic) - .bind(ntfy_server_url) - .bind(ntfy_username) - .bind(ntfy_password) - .bind(ntfy_access_token) - .bind(gotify_url) - .bind(gotify_token) - .execute(pool) - .await?; - result.rows_affected() > 0 + if let Some(row) = result { + let username: Option = row.try_get("Username").ok().flatten(); + let password: Option = row.try_get("Password").ok().flatten(); + Ok((username, password)) } else { - // Insert new record - let result = sqlx::query(r#" - INSERT INTO "UserNotificationSettings" - (userid, platform, enabled, ntfytopic, ntfyserverurl, ntfyusername, ntfypassword, ntfyaccesstoken, gotifyurl, gotifytoken) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - "#) - .bind(user_id) - .bind(platform) - .bind(enabled) - .bind(ntfy_topic) - .bind(ntfy_server_url) - .bind(ntfy_username) - .bind(ntfy_password) - .bind(ntfy_access_token) - .bind(gotify_url) - .bind(gotify_token) - .execute(pool) - .await?; - result.rows_affected() > 0 - } - } - DatabasePool::MySQL(pool) => { - let existing = sqlx::query("SELECT COUNT(*) as count FROM UserNotificationSettings WHERE UserID = ? AND Platform = ?") - .bind(user_id) - .bind(platform) - .fetch_one(pool) - .await?; - - let count: i64 = existing.try_get("count")?; - - if count > 0 { - // Update existing record - let result = sqlx::query(" - UPDATE UserNotificationSettings - SET Enabled = ?, NtfyTopic = ?, NtfyServerURL = ?, NtfyUsername = ?, NtfyPassword = ?, NtfyAccessToken = ?, GotifyURL = ?, GotifyToken = ? - WHERE UserID = ? AND Platform = ? 
- ") - .bind(enabled) - .bind(ntfy_topic) - .bind(ntfy_server_url) - .bind(ntfy_username) - .bind(ntfy_password) - .bind(ntfy_access_token) - .bind(gotify_url) - .bind(gotify_token) - .bind(user_id) - .bind(platform) - .execute(pool) - .await?; - result.rows_affected() > 0 - } else { - // Insert new record - let result = sqlx::query(" - INSERT INTO UserNotificationSettings - (UserID, Platform, Enabled, NtfyTopic, NtfyServerURL, NtfyUsername, NtfyPassword, NtfyAccessToken, GotifyURL, GotifyToken) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - ") - .bind(user_id) - .bind(platform) - .bind(enabled) - .bind(ntfy_topic) - .bind(ntfy_server_url) - .bind(ntfy_username) - .bind(ntfy_password) - .bind(ntfy_access_token) - .bind(gotify_url) - .bind(gotify_token) - .execute(pool) - .await?; - result.rows_affected() > 0 + Ok((None, None)) } } - }; - - println!("Successfully updated notification settings for user {} platform {}: {}", user_id, platform, success); - Ok(success) + } } - // Add OIDC provider - matches Python add_oidc_provider function exactly - pub async fn add_oidc_provider(&self, provider_name: &str, client_id: &str, client_secret: &str, authorization_url: &str, token_url: &str, user_info_url: &str, button_text: &str, scope: &str, button_color: &str, button_text_color: &str, icon_svg: &str, name_claim: &str, email_claim: &str, username_claim: &str, roles_claim: &str, user_role: &str, admin_role: &str) -> AppResult { - println!("Adding OIDC provider: {}", provider_name); + // Get podcast values from RSS feed - matches Python get_podcast_values function exactly + pub async fn get_podcast_values(&self, feed_url: &str, user_id: i32, username: Option<&str>, password: Option<&str>) -> AppResult> { + use reqwest::header::AUTHORIZATION; + use feed_rs::parser; - let provider_id = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - INSERT INTO "OIDCProviders" ( - providername, clientid, clientsecret, authorizationurl, - tokenurl, userinfourl, 
buttontext, scope, - buttoncolor, buttontextcolor, iconsvg, nameclaim, emailclaim, - usernameclaim, rolesclaim, userrole, adminrole - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17) - RETURNING providerid - "#) - .bind(provider_name) - .bind(client_id) - .bind(client_secret) - .bind(authorization_url) - .bind(token_url) - .bind(user_info_url) - .bind(button_text) - .bind(scope) - .bind(button_color) - .bind(button_text_color) - .bind(icon_svg) - .bind(name_claim) - .bind(email_claim) - .bind(username_claim) - .bind(roles_claim) - .bind(user_role) - .bind(admin_role) - .fetch_one(pool) - .await?; - - row.try_get("providerid")? - } - DatabasePool::MySQL(pool) => { - let result = sqlx::query(" - INSERT INTO OIDCProviders ( - ProviderName, ClientID, ClientSecret, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, - ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim, - UsernameClaim, RolesClaim, UserRole, AdminRole - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- ") - .bind(provider_name) - .bind(client_id) - .bind(client_secret) - .bind(authorization_url) - .bind(token_url) - .bind(user_info_url) - .bind(button_text) - .bind(scope) - .bind(button_color) - .bind(button_text_color) - .bind(icon_svg) - .bind(name_claim) - .bind(email_claim) - .bind(username_claim) - .bind(roles_claim) - .bind(user_role) - .bind(admin_role) - .execute(pool) - .await?; - - result.last_insert_id() as i32 - } - }; + println!("Fetching podcast values from feed URL: {}", feed_url); - println!("Successfully added OIDC provider with ID: {}", provider_id); - Ok(provider_id) - } - - // List OIDC providers - matches Python list_oidc_providers function exactly - pub async fn list_oidc_providers(&self) -> AppResult> { - println!("Listing all OIDC providers"); + // Build HTTP client with proper configuration for container environment + let client = reqwest::Client::builder() + .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| { + println!("Failed to build HTTP client in get_podcast_values: {}", e); + AppError::Http(e) + })?; - let providers = match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT providerid, providername, clientid, authorizationurl, - tokenurl, userinfourl, buttontext, scope, buttoncolor, - buttontextcolor, iconsvg, nameclaim, emailclaim, usernameclaim, - rolesclaim, userrole, adminrole, enabled, created, modified - FROM "OIDCProviders" - ORDER BY providername - "#) - .fetch_all(pool) - .await?; - - let mut providers = Vec::new(); - for row in rows { - let provider = serde_json::json!({ - "provider_id": row.try_get::("providerid")?, - "provider_name": row.try_get::("providername")?, - "client_id": row.try_get::("clientid")?, - "authorization_url": row.try_get::("authorizationurl")?, - "token_url": row.try_get::("tokenurl")?, - "user_info_url": 
row.try_get::("userinfourl")?, - "button_text": row.try_get::("buttontext")?, - "scope": row.try_get::("scope")?, - "button_color": row.try_get::("buttoncolor")?, - "button_text_color": row.try_get::("buttontextcolor")?, - "icon_svg": row.try_get::, _>("iconsvg")?, - "name_claim": row.try_get::, _>("nameclaim")?, - "email_claim": row.try_get::, _>("emailclaim")?, - "username_claim": row.try_get::, _>("usernameclaim")?, - "roles_claim": row.try_get::, _>("rolesclaim")?, - "user_role": row.try_get::, _>("userrole")?, - "admin_role": row.try_get::, _>("adminrole")?, - "enabled": row.try_get::("enabled")?, - "created": row.try_get::, _>("created")?, - "modified": row.try_get::, _>("modified")? - }); - providers.push(provider); + let mut request = client.get(feed_url); + + if let (Some(user), Some(pass)) = (username, password) { + println!("Using basic authentication for feed: {}", feed_url); + use base64::Engine; + let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass)); + request = request.header(AUTHORIZATION, format!("Basic {}", encoded)); + } + + // Fetch RSS feed with better error handling + println!("Sending HTTP request to: {}", feed_url); + let response = request.send().await + .map_err(|e| { + println!("HTTP request failed for {}: {}", feed_url, e); + AppError::Http(e) + })?; + + println!("Received response with status: {}", response.status()); + if !response.status().is_success() { + // If we get a 403, the server might be blocking browser User-Agents + // Try with a podcast client User-Agent + if response.status() == 403 { + println!("Got 403 Forbidden, trying with podcast client User-Agent"); + + let podcast_client = reqwest::Client::builder() + .user_agent("PinePods/1.0") + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| { + println!("Failed to build podcast client in get_podcast_values: {}", e); + AppError::Http(e) + })?; + + let mut podcast_request = podcast_client.get(feed_url); + + if let 
(Some(user), Some(pass)) = (username, password) { + println!("Using basic authentication for podcast client request: {}", feed_url); + use base64::Engine; + let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass)); + podcast_request = podcast_request.header(AUTHORIZATION, format!("Basic {}", encoded)); + } + + let podcast_response = podcast_request.send().await + .map_err(|e| { + println!("Podcast client request failed for {}: {}", feed_url, e); + AppError::Http(e) + })?; + + if podcast_response.status().is_success() { + println!("Podcast client request succeeded with status: {}", podcast_response.status()); + let content = podcast_response.text().await?; + + // Continue with the same parsing logic + return self.parse_feed_content_to_values(content, feed_url, user_id).await; } - providers - } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT ProviderID, ProviderName, ClientID, AuthorizationURL, - TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor, - ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim, - RolesClaim, UserRole, AdminRole, Enabled, Created, Modified - FROM OIDCProviders - ORDER BY ProviderName - ") - .fetch_all(pool) - .await?; - let mut providers = Vec::new(); - for row in rows { - let provider = serde_json::json!({ - "provider_id": row.try_get::("ProviderID")?, - "provider_name": row.try_get::("ProviderName")?, - "client_id": row.try_get::("ClientID")?, - "authorization_url": row.try_get::("AuthorizationURL")?, - "token_url": row.try_get::("TokenURL")?, - "user_info_url": row.try_get::("UserInfoURL")?, - "button_text": row.try_get::("ButtonText")?, - "scope": row.try_get::("Scope")?, - "button_color": row.try_get::("ButtonColor")?, - "button_text_color": row.try_get::("ButtonTextColor")?, - "icon_svg": row.try_get::, _>("IconSVG")?, - "name_claim": row.try_get::, _>("NameClaim")?, - "email_claim": row.try_get::, _>("EmailClaim")?, - "username_claim": row.try_get::, 
_>("UsernameClaim")?, - "roles_claim": row.try_get::, _>("RolesClaim")?, - "user_role": row.try_get::, _>("UserRole")?, - "admin_role": row.try_get::, _>("AdminRole")?, - "enabled": row.try_get::("Enabled")?, - "created": row.try_get::, _>("Created")?, - "modified": row.try_get::, _>("Modified")? - }); - providers.push(provider); - } - providers + println!("Podcast client request also failed with status: {}", podcast_response.status()); } - }; + + return Err(AppError::bad_request(&format!("Feed request failed: HTTP {}", response.status()))); + } - println!("Found {} OIDC providers", providers.len()); - Ok(providers) + let content = response.text().await?; + + self.parse_feed_content_to_values(content, feed_url, user_id).await } - // Get user start page - matches Python get_user_startpage function exactly - pub async fn get_user_startpage(&self, user_id: i32) -> AppResult { - println!("Getting start page for user {}", user_id); + // Helper function to parse feed content into podcast values (extracted to avoid duplication) + async fn parse_feed_content_to_values(&self, content: String, feed_url: &str, user_id: i32) -> AppResult> { + use feed_rs::parser; - let startpage = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT startpage FROM "UserSettings" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - row.try_get::, _>("startpage")?.unwrap_or_else(|| "home".to_string()) - } else { - "home".to_string() - } - } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT StartPage FROM UserSettings WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - row.try_get::, _>("StartPage")?.unwrap_or_else(|| "home".to_string()) - } else { - "home".to_string() - } - } + // Parse RSS feed using feed-rs + let feed = parser::parse(content.as_bytes()) + .map_err(|e| AppError::external_error(&format!("Failed to parse RSS feed: {}", e)))?; + + // 
Extract podcast metadata exactly as Python implementation + let mut podcast_values = std::collections::HashMap::new(); + + podcast_values.insert("feedurl".to_string(), feed_url.to_string()); + podcast_values.insert("userid".to_string(), user_id.to_string()); + podcast_values.insert("podcastname".to_string(), feed.title.as_ref().map(|t| t.content.clone()).unwrap_or_default()); + podcast_values.insert("description".to_string(), feed.description.as_ref().map(|d| d.content.clone()).unwrap_or_default()); + podcast_values.insert("author".to_string(), feed.authors.first().map(|a| a.name.clone()).unwrap_or_default()); + podcast_values.insert("websiteurl".to_string(), feed.links.first().map(|l| l.href.clone()).unwrap_or_default()); + podcast_values.insert("explicit".to_string(), "False".to_string()); // Default to False + podcast_values.insert("episodecount".to_string(), feed.entries.len().to_string()); + + // Extract artwork URL - check feed image and iTunes image + let artwork_url = feed.logo.as_ref().map(|l| l.uri.clone()) + .or_else(|| feed.icon.as_ref().map(|i| i.uri.clone())) + .unwrap_or_default(); + podcast_values.insert("artworkurl".to_string(), artwork_url); + + // Extract categories - convert to dict format like Python + let categories = if !feed.categories.is_empty() { + let cat_dict: std::collections::HashMap = feed.categories + .iter() + .enumerate() + .map(|(i, cat)| (i.to_string(), cat.term.clone())) + .collect(); + serde_json::to_string(&cat_dict).unwrap_or_default() + } else { + "{}".to_string() }; + podcast_values.insert("categories".to_string(), categories); - Ok(startpage) + // Set default values for fields not in RSS + podcast_values.insert("podcastindexid".to_string(), "0".to_string()); + podcast_values.insert("episodeupdatecount".to_string(), "0".to_string()); + + println!("Successfully extracted podcast values: {}", podcast_values.get("podcastname").unwrap_or(&"Unknown".to_string())); + + Ok(podcast_values) } - - // Set user start page - matches 
Python set_user_startpage function exactly - pub async fn set_user_startpage(&self, user_id: i32, startpage: &str) -> AppResult { - println!("Setting start page for user {} to {}", user_id, startpage); + + // Parse feed episodes using feed-rs and return JSON data (for frontend consumption) + pub async fn parse_feed_episodes(&self, feed_url: &str, user_id: i32) -> AppResult> { + use feed_rs::parser; + use reqwest::header::AUTHORIZATION; - // Check if user settings exist and perform update/insert - let success = match self { - DatabasePool::Postgres(pool) => { - let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserSettings" WHERE userid = $1"#) - .bind(user_id) - .fetch_one(pool) - .await?; - - let count: i64 = existing.try_get("count")?; - - if count > 0 { - // Update existing record - let result = sqlx::query(r#"UPDATE "UserSettings" SET startpage = $2 WHERE userid = $1"#) - .bind(user_id) - .bind(startpage) - .execute(pool) - .await?; - result.rows_affected() > 0 - } else { - // Insert new record - let result = sqlx::query(r#"INSERT INTO "UserSettings" (userid, startpage) VALUES ($1, $2)"#) - .bind(user_id) - .bind(startpage) - .execute(pool) - .await?; - result.rows_affected() > 0 + // Get stored authentication credentials for this feed + let (username, password) = self.get_feed_auth_credentials(feed_url, user_id).await?; + + // Build HTTP client with proper configuration + let client = reqwest::Client::builder() + .user_agent("PinePods/1.0") + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| AppError::external_error(&format!("Failed to build HTTP client: {}", e)))?; + + let mut request = client.get(feed_url); + + // Add authentication if available + if let (Some(user), Some(pass)) = (username.as_deref(), password.as_deref()) { + println!("Using stored authentication for feed: {}", feed_url); + use base64::Engine; + let encoded = base64::engine::general_purpose::STANDARD.encode(format!("{}:{}", user, pass)); + request = 
request.header(AUTHORIZATION, format!("Basic {}", encoded)); + } + + // Fetch RSS feed + let response = request.send().await?; + if !response.status().is_success() { + return Err(AppError::external_error(&format!("Failed to fetch RSS feed: {}", response.status()))); + } + + let content = response.text().await?; + + // Parse RSS feed using feed-rs + let feed = parser::parse(content.as_bytes()) + .map_err(|e| AppError::external_error(&format!("Failed to parse RSS feed: {}", e)))?; + + // Extract podcast-level artwork as fallback + let podcast_artwork = feed.logo.as_ref().map(|l| l.uri.clone()) + .or_else(|| feed.icon.as_ref().map(|i| i.uri.clone())) + .unwrap_or_default(); + + let mut episodes = Vec::new(); + + for entry in feed.entries.iter() { + // Skip episodes without titles + if entry.title.is_none() { + continue; + } + + // Extract episode data using same logic as add_episodes + let mut episode_data = std::collections::HashMap::new(); + + if let Some(title) = &entry.title { + episode_data.insert("title".to_string(), title.content.clone()); + } + + if let Some(summary) = &entry.summary { + episode_data.insert("summary".to_string(), summary.content.clone()); + } + + if let Some(content) = &entry.content { + if let Some(body) = &content.body { + episode_data.insert("content:encoded".to_string(), body.clone()); } } - DatabasePool::MySQL(pool) => { - let existing = sqlx::query("SELECT COUNT(*) as count FROM UserSettings WHERE UserID = ?") - .bind(user_id) - .fetch_one(pool) - .await?; - - let count: i64 = existing.try_get("count")?; - - if count > 0 { - // Update existing record - let result = sqlx::query("UPDATE UserSettings SET StartPage = ? 
WHERE UserID = ?") - .bind(startpage) - .bind(user_id) - .execute(pool) - .await?; - result.rows_affected() > 0 - } else { - // Insert new record - let result = sqlx::query("INSERT INTO UserSettings (UserID, StartPage) VALUES (?, ?)") - .bind(user_id) - .bind(startpage) - .execute(pool) - .await?; - result.rows_affected() > 0 + + // Extract audio URL from media or links + let mut audio_url = String::new(); + let mut enclosure_length = String::new(); + + // Check media objects first + for media in &entry.media { + for content in &media.content { + if let Some(url) = &content.url { + if let Some(media_type) = &content.content_type { + if media_type.to_string().starts_with("audio/") { + audio_url = url.to_string(); + if let Some(size) = content.size { + enclosure_length = size.to_string(); + } + break; + } + } + } } + if !audio_url.is_empty() { break; } } - }; + + // Fallback to links if no media found + if audio_url.is_empty() { + for link in &entry.links { + if let Some(media_type) = &link.media_type { + if media_type.starts_with("audio/") { + audio_url = link.href.clone(); + if let Some(length) = &link.length { + enclosure_length = length.to_string(); + } + break; + } + } + } + } + + // Extract episode artwork (with fallback to podcast artwork) + let mut episode_artwork = podcast_artwork.clone(); + for media in &entry.media { + if !media.thumbnails.is_empty() { + episode_artwork = media.thumbnails[0].image.uri.clone(); + break; + } + } + + // Extract duration from media or iTunes tags + let mut duration_seconds = 0; + + // First try media objects (works for some feeds) + for media in &entry.media { + if let Some(duration) = &media.duration { + duration_seconds = duration.as_secs() as i32; + break; + } + } + + // If media duration is suspicious (< 3600 seconds = 1 hour), + // try to extract iTunes duration from raw RSS to work around feed-rs parsing bugs + if duration_seconds > 0 && duration_seconds < 3600 { + if let Some(title) = &entry.title { + if let 
Some(corrected_duration) = Self::extract_itunes_duration_from_raw(&content, &title.content) { + if corrected_duration != duration_seconds as u64 { + println!("🔧 DURATION CORRECTED: '{}' feed-rs={} -> iTunes={}", + title.content, duration_seconds, corrected_duration); + duration_seconds = corrected_duration as i32; + } + } + } + } + + // Extract publication date + let pub_date = if let Some(published) = &entry.published { + published.to_rfc3339() + } else { + chrono::Utc::now().to_rfc3339() + }; + + // Create episode JSON object matching frontend Episode structure + let episode_json = serde_json::json!({ + "title": entry.title.as_ref().map(|t| t.content.clone()), + "description": entry.summary.as_ref().map(|s| s.content.clone()), + "pub_date": pub_date, + "enclosure_url": if audio_url.is_empty() { None } else { Some(audio_url) }, + "enclosure_length": if enclosure_length.is_empty() { None } else { Some(enclosure_length) }, + "artwork": if episode_artwork.is_empty() { None } else { Some(episode_artwork) }, + "content": entry.content.as_ref().and_then(|c| c.body.clone()), + "duration": duration_seconds, + "guid": entry.id.clone() + }); + + episodes.push(episode_json); + } - println!("Successfully set start page for user {} to {}: {}", user_id, startpage, success); - Ok(success) - } - - // Update startpage wrapper function for compatibility - pub async fn update_startpage(&self, user_id: i32, startpage: &str) -> AppResult { - self.set_user_startpage(user_id, startpage).await - } - - // Get startpage wrapper function for compatibility - pub async fn get_startpage(&self, user_id: i32) -> AppResult { - self.get_user_startpage(user_id).await + Ok(episodes) } - // Get user auto complete seconds setting - pub async fn get_user_auto_complete_seconds(&self, user_id: i32) -> AppResult { - println!("Getting auto complete seconds for user {}", user_id); + // Extract iTunes duration from raw RSS for a specific episode title (workaround for feed-rs bugs) + fn 
extract_itunes_duration_from_raw(raw_rss: &str, episode_title: &str) -> Option { + // HTML encode the episode title to match raw RSS format + // IMPORTANT: encode & first, otherwise it will double-encode other entities + let html_encoded_title = episode_title + .replace("&", "&") + .replace("'", "'") + .replace("\"", """) + .replace("<", "<") + .replace(">", ">"); + + // Try different title formats - RSS titles can vary from feed-rs parsed titles + let search_patterns = vec![ + format!("<![CDATA[{}]]>", episode_title), + format!("{}", episode_title), + format!("<![CDATA[{}]]>", html_encoded_title), + format!("{}", html_encoded_title), + // Also try searching for the core title without extra formatting + episode_title.split(" (").next().unwrap_or(episode_title).to_string(), + html_encoded_title.split(" (").next().unwrap_or(&html_encoded_title).to_string(), + ]; - let auto_complete_seconds = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT autocompleteseconds FROM "UserSettings" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - row.try_get::, _>("autocompleteseconds")?.unwrap_or(0) - } else { - 0 + let mut item_pos = None; + let mut item_end_pos = None; + + // Find any item that contains this title or a partial match + for pattern in &search_patterns { + if let Some(pos) = raw_rss.find(pattern) { + // Find the start of the block containing this title + let item_start = raw_rss[..pos].rfind("").unwrap_or(0); + if let Some(end) = raw_rss[pos..].find("") { + item_pos = Some(item_start); + item_end_pos = Some(pos + end + "".len()); + break; } } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT AutoCompleteSeconds FROM UserSettings WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - row.try_get::, _>("AutoCompleteSeconds")?.unwrap_or(0) + } + + // If exact matches failed, try a broader search by looking through all items 
+ if item_pos.is_none() { + let mut start = 0; + while let Some(item_start) = raw_rss[start..].find("") { + let absolute_start = start + item_start; + if let Some(item_end) = raw_rss[absolute_start..].find("") { + let absolute_end = absolute_start + item_end + "".len(); + let item_block = &raw_rss[absolute_start..absolute_end]; + + // Check if this item contains any part of our episode title + let title_core = episode_title.split(" (").next().unwrap_or(episode_title); + if item_block.contains(title_core) { + item_pos = Some(absolute_start); + item_end_pos = Some(absolute_end); + break; + } + + start = absolute_end; } else { - 0 + break; } } - }; + } - Ok(auto_complete_seconds) + // Extract duration from the found item block + if let (Some(start), Some(end)) = (item_pos, item_end_pos) { + let item_block = &raw_rss[start..end]; + + // Look for in this item block + if let Some(duration_start) = item_block.find("") { + let duration_content_start = duration_start + "".len(); + if let Some(duration_end) = item_block[duration_content_start..].find("") { + let duration_str = &item_block[duration_content_start..duration_content_start + duration_end]; + return Self::parse_itunes_duration(duration_str.trim()); + } + } + } + + None } - - // Set user auto complete seconds setting - pub async fn set_user_auto_complete_seconds(&self, user_id: i32, seconds: i32) -> AppResult { - println!("Setting auto complete seconds for user {} to {}", user_id, seconds); + + // Parse iTunes duration string (HH:MM:SS, MM:SS, or seconds) to total seconds + fn parse_itunes_duration(duration_str: &str) -> Option { + if duration_str.is_empty() { + return None; + } - // Check if user settings exist and perform update/insert - let success = match self { - DatabasePool::Postgres(pool) => { - let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserSettings" WHERE userid = $1"#) + // If it's just a number, treat as seconds + if let Ok(seconds) = duration_str.parse::() { + return Some(seconds); + 
} + + // Split by colons for time format + let parts: Vec<&str> = duration_str.split(':').collect(); + + match parts.len() { + 1 => { + // Just seconds + parts[0].parse::().ok() + }, + 2 => { + // MM:SS + let minutes = parts[0].parse::().ok()?; + let seconds = parts[1].parse::().ok()?; + Some(minutes * 60 + seconds) + }, + 3 => { + // HH:MM:SS + let hours = parts[0].parse::().ok()?; + let minutes = parts[1].parse::().ok()?; + let seconds = parts[2].parse::().ok()?; + Some(hours * 3600 + minutes * 60 + seconds) + }, + _ => None + } + } + + // Add podcast from RSS values - wrapper function for custom podcast addition + pub async fn add_podcast_from_values(&self, podcast_values: &std::collections::HashMap, user_id: i32, _feed_cutoff: i32, username: Option<&str>, password: Option<&str>) -> AppResult<(i32, Option)> { + // Convert HashMap values to PodcastValues struct + let podcast_data = crate::handlers::podcasts::PodcastValues { + user_id, + pod_title: podcast_values.get("podcastname").unwrap_or(&"".to_string()).clone(), + pod_artwork: podcast_values.get("artworkurl").unwrap_or(&"".to_string()).clone(), + pod_author: podcast_values.get("author").unwrap_or(&"".to_string()).clone(), + categories: std::collections::HashMap::new(), // Parse from string if needed + pod_description: podcast_values.get("description").unwrap_or(&"".to_string()).clone(), + pod_episode_count: podcast_values.get("episodecount").unwrap_or(&"0".to_string()).parse().unwrap_or(0), + pod_feed_url: podcast_values.get("feedurl").unwrap_or(&"".to_string()).clone(), + pod_website: podcast_values.get("websiteurl").unwrap_or(&"".to_string()).clone(), + pod_explicit: podcast_values.get("explicit").unwrap_or(&"False".to_string()) == "True", + }; + + let podcast_index_id = podcast_values.get("podcastindexid") + .unwrap_or(&"0".to_string()) + .parse::() + .unwrap_or(0); + + self.add_podcast(&podcast_data, podcast_index_id, username, password).await + } + + // // Get podcast details - matches Python 
get_podcast_details function exactly + // pub async fn get_podcast_details(&self, user_id: i32, podcast_id: i32) -> AppResult> { + // println!("Getting podcast details for podcast {} and user {}", podcast_id, user_id); + + // let details = match self { + // DatabasePool::Postgres(pool) => { + // // Try to get podcast for user first, then fall back to UserID=1 + // let mut row = sqlx::query(r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) + // .bind(podcast_id) + // .bind(user_id) + // .fetch_optional(pool) + // .await?; + + // if row.is_none() { + // row = sqlx::query(r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = 1"#) + // .bind(podcast_id) + // .fetch_optional(pool) + // .await?; + // } + + // if let Some(row) = row { + // // Get episode count from YouTubeVideos table if this is a YouTube channel + // let mut episode_count = row.try_get::("episodecount")?; + // let is_youtube_channel = row.try_get::("isyoutubechannel").unwrap_or(false); + + // if is_youtube_channel { + // let count_result = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) + // .bind(podcast_id) + // .fetch_one(pool) + // .await?; + // episode_count = count_result.try_get::("count")? 
as i32; + // } + + // Some(serde_json::json!({ + // "podcastid": row.try_get::("podcastid")?, + // "podcastindexid": row.try_get::, _>("podcastindexid")?, + // "podcastname": row.try_get::("podcastname")?, + // "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default(), + // "author": row.try_get::("author")?, + // "categories": row.try_get::, _>("categories")?, + // "description": row.try_get::("description")?, + // "episodecount": episode_count, + // "feedurl": row.try_get::("feedurl")?, + // "websiteurl": row.try_get::("websiteurl")?, + // "explicit": row.try_get::("explicit")?, + // "userid": row.try_get::("userid")?, + // "autodownload": row.try_get::("autodownload").unwrap_or(false), + // "startskip": row.try_get::("startskip").unwrap_or(0), + // "endskip": row.try_get::("endskip").unwrap_or(0), + // "username": row.try_get::, _>("username")?, + // "password": row.try_get::, _>("password")?, + // "isyoutubechannel": is_youtube_channel, + // "notificationsenabled": row.try_get::("notificationsenabled").unwrap_or(false), + // "feedcutoffdays": row.try_get::("feedcutoffdays").unwrap_or(0), + // })) + // } else { + // None + // } + // } + // DatabasePool::MySQL(pool) => { + // // Try to get podcast for user first, then fall back to UserID=1 + // let mut row = sqlx::query("SELECT * FROM Podcasts WHERE PodcastID = ? AND UserID = ?") + // .bind(podcast_id) + // .bind(user_id) + // .fetch_optional(pool) + // .await?; + + // if row.is_none() { + // row = sqlx::query("SELECT * FROM Podcasts WHERE PodcastID = ? 
AND UserID = 1") + // .bind(podcast_id) + // .fetch_optional(pool) + // .await?; + // } + + // if let Some(row) = row { + // // Get episode count from YouTubeVideos table if this is a YouTube channel + // let mut episode_count = row.try_get::("EpisodeCount")?; + // let is_youtube_channel = row.try_get::("IsYouTubeChannel").unwrap_or(0) != 0; + + // if is_youtube_channel { + // let count_result = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") + // .bind(podcast_id) + // .fetch_one(pool) + // .await?; + // episode_count = count_result.try_get::("count")? as i32; + // } + + // Some(serde_json::json!({ + // "podcastid": row.try_get::("PodcastID")?, + // "podcastindexid": row.try_get::, _>("PodcastIndexID")?, + // "podcastname": row.try_get::("PodcastName")?, + // "artworkurl": row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_default(), + // "author": row.try_get::("Author")?, + // "categories": row.try_get::, _>("Categories")?, + // "description": row.try_get::("Description")?, + // "episodecount": episode_count, + // "feedurl": row.try_get::("FeedURL")?, + // "websiteurl": row.try_get::("WebsiteURL")?, + // "explicit": row.try_get::("Explicit").unwrap_or(0) != 0, + // "userid": row.try_get::("UserID")?, + // "autodownload": row.try_get::("AutoDownload").unwrap_or(0) != 0, + // "startskip": row.try_get::("StartSkip").unwrap_or(0), + // "endskip": row.try_get::("EndSkip").unwrap_or(0), + // "username": row.try_get::, _>("Username")?, + // "password": row.try_get::, _>("Password")?, + // "isyoutubechannel": is_youtube_channel, + // "notificationsenabled": row.try_get::("NotificationsEnabled").unwrap_or(0) != 0, + // "feedcutoffdays": row.try_get::("FeedCutoffDays").unwrap_or(0), + // })) + // } else { + // None + // } + // } + // }; + + // if let Some(ref result) = details { + // println!("Found podcast details for: {}", result["podcast_name"]); + // } else { + // println!("No podcast found with ID {} for user {}", podcast_id, 
user_id); + // } + + // Ok(details) + // } + + // Get notification settings - matches Python get_notification_settings function exactly + pub async fn get_notification_settings(&self, user_id: i32) -> AppResult> { + println!("Getting notification settings for user {}", user_id); + + let settings = match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#" + SELECT platform, enabled, ntfytopic, ntfyserverurl, ntfyusername, ntfypassword, ntfyaccesstoken, gotifyurl, gotifytoken, httpurl, httptoken, httpmethod + FROM "UserNotificationSettings" + WHERE userid = $1 + "#) + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut settings = Vec::new(); + for row in rows { + let setting = serde_json::json!({ + "platform": row.try_get::("platform")?, + "enabled": row.try_get::("enabled")?, + "ntfy_topic": row.try_get::, _>("ntfytopic")?, + "ntfy_server_url": row.try_get::, _>("ntfyserverurl")?, + "ntfy_username": row.try_get::, _>("ntfyusername")?, + "ntfy_password": row.try_get::, _>("ntfypassword")?, + "ntfy_access_token": row.try_get::, _>("ntfyaccesstoken")?, + "gotify_url": row.try_get::, _>("gotifyurl")?, + "gotify_token": row.try_get::, _>("gotifytoken")?, + "http_url": row.try_get::, _>("httpurl")?, + "http_token": row.try_get::, _>("httptoken")?, + "http_method": row.try_get::, _>("httpmethod")? + }); + settings.push(setting); + } + settings + } + DatabasePool::MySQL(pool) => { + let rows = sqlx::query(" + SELECT Platform, Enabled, NtfyTopic, NtfyServerURL, NtfyUsername, NtfyPassword, NtfyAccessToken, GotifyURL, GotifyToken, HttpUrl, HttpToken, HttpMethod + FROM UserNotificationSettings + WHERE UserID = ? 
+ ORDER BY Platform + ") + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut settings = Vec::new(); + for row in rows { + let setting = serde_json::json!({ + "platform": row.try_get::("Platform")?, + "enabled": row.try_get::("Enabled")?, + "ntfy_topic": row.try_get::, _>("NtfyTopic")?, + "ntfy_server_url": row.try_get::, _>("NtfyServerURL")?, + "ntfy_username": row.try_get::, _>("NtfyUsername")?, + "ntfy_password": row.try_get::, _>("NtfyPassword")?, + "ntfy_access_token": row.try_get::, _>("NtfyAccessToken")?, + "gotify_url": row.try_get::, _>("GotifyURL")?, + "gotify_token": row.try_get::, _>("GotifyToken")?, + "http_url": row.try_get::, _>("HttpUrl")?, + "http_token": row.try_get::, _>("HttpToken")?, + "http_method": row.try_get::, _>("HttpMethod")? + }); + settings.push(setting); + } + settings + } + }; + + println!("Found {} notification settings for user {}", settings.len(), user_id); + Ok(settings) + } + + // Update notification settings - matches Python update_notification_settings function exactly + pub async fn update_notification_settings(&self, user_id: i32, platform: &str, enabled: bool, ntfy_topic: Option<&str>, ntfy_server_url: Option<&str>, ntfy_username: Option<&str>, ntfy_password: Option<&str>, ntfy_access_token: Option<&str>, gotify_url: Option<&str>, gotify_token: Option<&str>, http_url: Option<&str>, http_token: Option<&str>, http_method: Option<&str>) -> AppResult { + println!("Updating notification settings for user {} platform {}", user_id, platform); + + // Check if settings exist for this user/platform combination and perform update/insert + let success = match self { + DatabasePool::Postgres(pool) => { + let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserNotificationSettings" WHERE userid = $1 AND platform = $2"#) .bind(user_id) + .bind(platform) .fetch_one(pool) .await?; @@ -11815,25 +14278,56 @@ impl DatabasePool { if count > 0 { // Update existing record - let result = sqlx::query(r#"UPDATE "UserSettings" SET 
autocompleteseconds = $2 WHERE userid = $1"#) + let result = sqlx::query(r#" + UPDATE "UserNotificationSettings" + SET enabled = $3, ntfytopic = $4, ntfyserverurl = $5, ntfyusername = $6, ntfypassword = $7, ntfyaccesstoken = $8, gotifyurl = $9, gotifytoken = $10, httpurl = $11, httptoken = $12, httpmethod = $13 + WHERE userid = $1 AND platform = $2 + "#) .bind(user_id) - .bind(seconds) + .bind(platform) + .bind(enabled) + .bind(ntfy_topic) + .bind(ntfy_server_url) + .bind(ntfy_username) + .bind(ntfy_password) + .bind(ntfy_access_token) + .bind(gotify_url) + .bind(gotify_token) + .bind(http_url) + .bind(http_token) + .bind(http_method) .execute(pool) .await?; result.rows_affected() > 0 } else { - // Insert new record with default theme - let result = sqlx::query(r#"INSERT INTO "UserSettings" (userid, autocompleteseconds, theme) VALUES ($1, $2, 'Nordic')"#) + // Insert new record + let result = sqlx::query(r#" + INSERT INTO "UserNotificationSettings" + (userid, platform, enabled, ntfytopic, ntfyserverurl, ntfyusername, ntfypassword, ntfyaccesstoken, gotifyurl, gotifytoken, httpurl, httptoken, httpmethod) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + "#) .bind(user_id) - .bind(seconds) + .bind(platform) + .bind(enabled) + .bind(ntfy_topic) + .bind(ntfy_server_url) + .bind(ntfy_username) + .bind(ntfy_password) + .bind(ntfy_access_token) + .bind(gotify_url) + .bind(gotify_token) + .bind(http_url) + .bind(http_token) + .bind(http_method) .execute(pool) .await?; result.rows_affected() > 0 } } DatabasePool::MySQL(pool) => { - let existing = sqlx::query("SELECT COUNT(*) as count FROM UserSettings WHERE UserID = ?") + let existing = sqlx::query("SELECT COUNT(*) as count FROM UserNotificationSettings WHERE UserID = ? AND Platform = ?") .bind(user_id) + .bind(platform) .fetch_one(pool) .await?; @@ -11841,17 +14335,47 @@ impl DatabasePool { if count > 0 { // Update existing record - let result = sqlx::query("UPDATE UserSettings SET AutoCompleteSeconds = ? 
WHERE UserID = ?") - .bind(seconds) + let result = sqlx::query(" + UPDATE UserNotificationSettings + SET Enabled = ?, NtfyTopic = ?, NtfyServerURL = ?, NtfyUsername = ?, NtfyPassword = ?, NtfyAccessToken = ?, GotifyURL = ?, GotifyToken = ?, HttpUrl = ?, HttpToken = ?, HttpMethod = ? + WHERE UserID = ? AND Platform = ? + ") + .bind(enabled) + .bind(ntfy_topic) + .bind(ntfy_server_url) + .bind(ntfy_username) + .bind(ntfy_password) + .bind(ntfy_access_token) + .bind(gotify_url) + .bind(gotify_token) + .bind(http_url) + .bind(http_token) + .bind(http_method) .bind(user_id) + .bind(platform) .execute(pool) .await?; result.rows_affected() > 0 } else { - // Insert new record with default theme - let result = sqlx::query("INSERT INTO UserSettings (UserID, AutoCompleteSeconds, Theme) VALUES (?, ?, 'Nordic')") + // Insert new record + let result = sqlx::query(" + INSERT INTO UserNotificationSettings + (UserID, Platform, Enabled, NtfyTopic, NtfyServerURL, NtfyUsername, NtfyPassword, NtfyAccessToken, GotifyURL, GotifyToken, HttpUrl, HttpToken, HttpMethod) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ ") .bind(user_id) - .bind(seconds) + .bind(platform) + .bind(enabled) + .bind(ntfy_topic) + .bind(ntfy_server_url) + .bind(ntfy_username) + .bind(ntfy_password) + .bind(ntfy_access_token) + .bind(gotify_url) + .bind(gotify_token) + .bind(http_url) + .bind(http_token) + .bind(http_method) .execute(pool) .await?; result.rows_affected() > 0 @@ -11859,6677 +14383,10782 @@ impl DatabasePool { } }; - println!("Successfully set auto complete seconds for user {} to {}: {}", user_id, seconds, success); + println!("Successfully updated notification settings for user {} platform {}: {}", user_id, platform, success); Ok(success) } - - // Get episode duration in seconds - pub async fn get_episode_duration(&self, episode_id: i32) -> AppResult { - match self { + + // Add OIDC provider - matches Python add_oidc_provider function exactly + pub async fn add_oidc_provider(&self, provider_name: &str, client_id: &str, client_secret: &str, authorization_url: &str, token_url: &str, user_info_url: &str, button_text: &str, scope: &str, button_color: &str, button_text_color: &str, icon_svg: &str, name_claim: &str, email_claim: &str, username_claim: &str, roles_claim: &str, user_role: &str, admin_role: &str, initialized_from_env: bool) -> AppResult { + println!("Adding OIDC provider: {}", provider_name); + + let provider_id = match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT episodeduration FROM "Episodes" WHERE episodeid = $1"#) - .bind(episode_id) - .fetch_optional(pool) + let row = sqlx::query(r#" + INSERT INTO "OIDCProviders" ( + providername, clientid, clientsecret, authorizationurl, + tokenurl, userinfourl, buttontext, scope, + buttoncolor, buttontextcolor, iconsvg, nameclaim, emailclaim, + usernameclaim, rolesclaim, userrole, adminrole, initializedFromEnv + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) + RETURNING providerid + "#) + .bind(provider_name) + .bind(client_id) + .bind(client_secret) + 
.bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .bind(initialized_from_env) + .fetch_one(pool) .await?; - if let Some(row) = row { - Ok(row.try_get::, _>("episodeduration")?.unwrap_or(0)) - } else { - Ok(0) + row.try_get("providerid")? + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query(" + INSERT INTO OIDCProviders ( + ProviderName, ClientID, ClientSecret, AuthorizationURL, + TokenURL, UserInfoURL, ButtonText, Scope, + ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim, + UsernameClaim, RolesClaim, UserRole, AdminRole, InitializedFromEnv + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ") + .bind(provider_name) + .bind(client_id) + .bind(client_secret) + .bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .bind(initialized_from_env) + .execute(pool) + .await?; + + result.last_insert_id() as i32 + } + }; + + println!("Successfully added OIDC provider with ID: {}", provider_id); + Ok(provider_id) + } + + // List OIDC providers - matches Python list_oidc_providers function exactly + pub async fn list_oidc_providers(&self) -> AppResult> { + println!("Listing all OIDC providers"); + + let providers = match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#" + SELECT providerid, providername, clientid, authorizationurl, + tokenurl, userinfourl, buttontext, scope, buttoncolor, + buttontextcolor, iconsvg, nameclaim, emailclaim, usernameclaim, + rolesclaim, userrole, adminrole, enabled, created, modified, initializedfromenv + 
FROM "OIDCProviders" + ORDER BY providername + "#) + .fetch_all(pool) + .await?; + + let mut providers = Vec::new(); + for row in rows { + let provider = serde_json::json!({ + "provider_id": row.try_get::("providerid")?, + "provider_name": row.try_get::("providername")?, + "client_id": row.try_get::("clientid")?, + "authorization_url": row.try_get::("authorizationurl")?, + "token_url": row.try_get::("tokenurl")?, + "user_info_url": row.try_get::("userinfourl")?, + "button_text": row.try_get::("buttontext")?, + "scope": row.try_get::("scope")?, + "button_color": row.try_get::("buttoncolor")?, + "button_text_color": row.try_get::("buttontextcolor")?, + "icon_svg": row.try_get::, _>("iconsvg")?, + "name_claim": row.try_get::, _>("nameclaim")?, + "email_claim": row.try_get::, _>("emailclaim")?, + "username_claim": row.try_get::, _>("usernameclaim")?, + "roles_claim": row.try_get::, _>("rolesclaim")?, + "user_role": row.try_get::, _>("userrole")?, + "admin_role": row.try_get::, _>("adminrole")?, + "enabled": row.try_get::("enabled")?, + "created": row.try_get::, _>("created")?, + "modified": row.try_get::, _>("modified")?, + "initialized_from_env": row.try_get::("initializedfromenv").unwrap_or(false) + }); + providers.push(provider); } + providers } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EpisodeDuration FROM Episodes WHERE EpisodeID = ?") - .bind(episode_id) - .fetch_optional(pool) + let rows = sqlx::query(" + SELECT ProviderID, ProviderName, ClientID, AuthorizationURL, + TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor, + ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim, + RolesClaim, UserRole, AdminRole, Enabled, Created, Modified, InitializedFromEnv + FROM OIDCProviders + ORDER BY ProviderName + ") + .fetch_all(pool) .await?; - if let Some(row) = row { - Ok(row.try_get::, _>("EpisodeDuration")?.unwrap_or(0)) - } else { - Ok(0) + let mut providers = Vec::new(); + for row in rows { + let provider = serde_json::json!({ + 
"provider_id": row.try_get::("ProviderID")?, + "provider_name": row.try_get::("ProviderName")?, + "client_id": row.try_get::("ClientID")?, + "authorization_url": row.try_get::("AuthorizationURL")?, + "token_url": row.try_get::("TokenURL")?, + "user_info_url": row.try_get::("UserInfoURL")?, + "button_text": row.try_get::("ButtonText")?, + "scope": row.try_get::("Scope")?, + "button_color": row.try_get::("ButtonColor")?, + "button_text_color": row.try_get::("ButtonTextColor")?, + "icon_svg": row.try_get::, _>("IconSVG")?, + "name_claim": row.try_get::, _>("NameClaim")?, + "email_claim": row.try_get::, _>("EmailClaim")?, + "username_claim": row.try_get::, _>("UsernameClaim")?, + "roles_claim": row.try_get::, _>("RolesClaim")?, + "user_role": row.try_get::, _>("UserRole")?, + "admin_role": row.try_get::, _>("AdminRole")?, + "enabled": row.try_get::("Enabled")?, + "created": row.try_get::, _>("Created")?, + "modified": row.try_get::, _>("Modified")?, + "initialized_from_env": row.try_get::("InitializedFromEnv").unwrap_or(false) + }); + providers.push(provider); } + providers + } + }; + + println!("Found {} OIDC providers", providers.len()); + Ok(providers) + } + + // Remove OIDC provider - matches Python remove_oidc_provider function exactly + pub async fn remove_oidc_provider(&self, provider_id: i32) -> AppResult { + println!("Removing OIDC provider with ID: {}", provider_id); + + let rows_affected = match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"DELETE FROM "OIDCProviders" WHERE ProviderID = $1"#) + .bind(provider_id) + .execute(pool) + .await?; + + result.rows_affected() + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("DELETE FROM OIDCProviders WHERE ProviderID = ?") + .bind(provider_id) + .execute(pool) + .await?; + + result.rows_affected() } + }; + + let success = rows_affected > 0; + if success { + println!("Successfully removed OIDC provider with ID: {}", provider_id); + } else { + println!("No OIDC provider found 
with ID: {}", provider_id); } + Ok(success) } - // Get YouTube episode duration in seconds - pub async fn get_youtube_episode_duration(&self, episode_id: i32) -> AppResult { + // Initialize OIDC provider from environment variables on container startup + pub async fn init_oidc_from_env(&self, oidc_config: &OIDCConfig) -> AppResult<()> { + if !oidc_config.is_configured() { + println!("OIDC environment variables not configured, skipping initialization"); + return Ok(()); + } + + let provider_name = oidc_config.provider_name.as_ref().unwrap(); + let client_id = oidc_config.client_id.as_ref().unwrap(); + + // Check if provider already exists + let existing = self.get_oidc_provider_by_client_id(client_id).await?; + if existing.is_some() { + println!("OIDC provider with client_id '{}' already exists, skipping initialization", client_id); + return Ok(()); + } + + println!("Initializing OIDC provider '{}' from environment variables", provider_name); + + // Create the provider with all the configuration + let provider_id = self.add_oidc_provider( + provider_name, + client_id, + oidc_config.client_secret.as_ref().unwrap(), + oidc_config.authorization_url.as_ref().unwrap(), + oidc_config.token_url.as_ref().unwrap(), + oidc_config.user_info_url.as_ref().unwrap(), + oidc_config.button_text.as_ref().unwrap(), + oidc_config.scope.as_deref().unwrap_or("openid email profile"), + oidc_config.button_color.as_deref().unwrap_or("#000000"), + oidc_config.button_text_color.as_deref().unwrap_or("#FFFFFF"), + oidc_config.icon_svg.as_deref().unwrap_or(""), + oidc_config.name_claim.as_deref().unwrap_or("name"), + oidc_config.email_claim.as_deref().unwrap_or("email"), + oidc_config.username_claim.as_deref().unwrap_or("preferred_username"), + oidc_config.roles_claim.as_deref().unwrap_or("roles"), + oidc_config.user_role.as_deref().unwrap_or("user"), + oidc_config.admin_role.as_deref().unwrap_or("admin"), + true, // initialized_from_env = true + ).await?; + + println!("Successfully initialized 
OIDC provider '{}' with ID: {}", provider_name, provider_id); + Ok(()) + } + + // Check if OIDC provider was initialized from environment variables + pub async fn is_oidc_provider_env_initialized(&self, provider_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT duration FROM "YouTubeVideos" WHERE videoid = $1"#) - .bind(episode_id) + let result = sqlx::query(r#" + SELECT initializedfromenv FROM "OIDCProviders" WHERE providerid = $1 + "#) + .bind(provider_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - Ok(row.try_get::, _>("duration")?.unwrap_or(0)) - } else { - Ok(0) - } + Ok(result.map(|row| row.try_get::("initializedfromenv").unwrap_or(false)).unwrap_or(false)) } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT Duration FROM YouTubeVideos WHERE VideoID = ?") - .bind(episode_id) + let result = sqlx::query(r#" + SELECT InitializedFromEnv FROM OIDCProviders WHERE ProviderID = ? + "#) + .bind(provider_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - Ok(row.try_get::, _>("Duration")?.unwrap_or(0)) - } else { - Ok(0) - } + Ok(result.map(|row| row.try_get::("InitializedFromEnv").unwrap_or(false)).unwrap_or(false)) } } } - // Get users who have auto-complete enabled (auto_complete_seconds > 0) - pub async fn get_users_with_auto_complete_enabled(&self) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT us.userid, us.autocompleteseconds - FROM "UserSettings" us - WHERE us.autocompleteseconds > 0 - "#) - .fetch_all(pool) - .await?; + // Update OIDC provider - updates an existing provider with new values + // If client_secret is None, the existing secret will be preserved + pub async fn update_oidc_provider(&self, provider_id: i32, provider_name: &str, client_id: &str, client_secret: Option<&str>, authorization_url: &str, token_url: &str, user_info_url: &str, button_text: &str, scope: &str, button_color: &str, 
button_text_color: &str, icon_svg: &str, name_claim: &str, email_claim: &str, username_claim: &str, roles_claim: &str, user_role: &str, admin_role: &str) -> AppResult { + println!("Updating OIDC provider with ID: {}", provider_id); - let mut users = Vec::new(); - for row in rows { - users.push(UserAutoComplete { - user_id: row.try_get("userid")?, - auto_complete_seconds: row.try_get("autocompleteseconds")?, - }); + let rows_affected = match self { + DatabasePool::Postgres(pool) => { + // Build query dynamically based on whether client_secret is provided + if let Some(secret) = client_secret { + let result = sqlx::query(r#" + UPDATE "OIDCProviders" SET + providername = $2, clientid = $3, clientsecret = $4, + authorizationurl = $5, tokenurl = $6, userinfourl = $7, + buttontext = $8, scope = $9, buttoncolor = $10, + buttontextcolor = $11, iconsvg = $12, nameclaim = $13, + emailclaim = $14, usernameclaim = $15, rolesclaim = $16, + userrole = $17, adminrole = $18, modified = CURRENT_TIMESTAMP + WHERE providerid = $1 + "#) + .bind(provider_id) + .bind(provider_name) + .bind(client_id) + .bind(secret) + .bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .execute(pool) + .await?; + result.rows_affected() + } else { + // Don't update client_secret if not provided + let result = sqlx::query(r#" + UPDATE "OIDCProviders" SET + providername = $2, clientid = $3, + authorizationurl = $4, tokenurl = $5, userinfourl = $6, + buttontext = $7, scope = $8, buttoncolor = $9, + buttontextcolor = $10, iconsvg = $11, nameclaim = $12, + emailclaim = $13, usernameclaim = $14, rolesclaim = $15, + userrole = $16, adminrole = $17, modified = CURRENT_TIMESTAMP + WHERE providerid = $1 + "#) + .bind(provider_id) + .bind(provider_name) + .bind(client_id) + 
.bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .execute(pool) + .await?; + result.rows_affected() } - Ok(users) } DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT UserID, AutoCompleteSeconds - FROM UserSettings - WHERE AutoCompleteSeconds > 0 - ") - .fetch_all(pool) - .await?; - - let mut users = Vec::new(); - for row in rows { - users.push(UserAutoComplete { - user_id: row.try_get("UserID")?, - auto_complete_seconds: row.try_get("AutoCompleteSeconds")?, - }); + if let Some(secret) = client_secret { + let result = sqlx::query(" + UPDATE OIDCProviders SET + ProviderName = ?, ClientID = ?, ClientSecret = ?, + AuthorizationURL = ?, TokenURL = ?, UserInfoURL = ?, + ButtonText = ?, Scope = ?, ButtonColor = ?, + ButtonTextColor = ?, IconSVG = ?, NameClaim = ?, + EmailClaim = ?, UsernameClaim = ?, RolesClaim = ?, + UserRole = ?, AdminRole = ?, Modified = CURRENT_TIMESTAMP + WHERE ProviderID = ? 
+ ") + .bind(provider_name) + .bind(client_id) + .bind(secret) + .bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .bind(provider_id) + .execute(pool) + .await?; + result.rows_affected() + } else { + // Don't update client_secret if not provided + let result = sqlx::query(" + UPDATE OIDCProviders SET + ProviderName = ?, ClientID = ?, + AuthorizationURL = ?, TokenURL = ?, UserInfoURL = ?, + ButtonText = ?, Scope = ?, ButtonColor = ?, + ButtonTextColor = ?, IconSVG = ?, NameClaim = ?, + EmailClaim = ?, UsernameClaim = ?, RolesClaim = ?, + UserRole = ?, AdminRole = ?, Modified = CURRENT_TIMESTAMP + WHERE ProviderID = ? + ") + .bind(provider_name) + .bind(client_id) + .bind(authorization_url) + .bind(token_url) + .bind(user_info_url) + .bind(button_text) + .bind(scope) + .bind(button_color) + .bind(button_text_color) + .bind(icon_svg) + .bind(name_claim) + .bind(email_claim) + .bind(username_claim) + .bind(roles_claim) + .bind(user_role) + .bind(admin_role) + .bind(provider_id) + .execute(pool) + .await?; + result.rows_affected() } - Ok(users) } + }; + + let success = rows_affected > 0; + if success { + println!("Successfully updated OIDC provider with ID: {}", provider_id); + } else { + println!("No OIDC provider found with ID: {}", provider_id); } + Ok(success) } - - // Auto-complete episodes for a user based on their setting - pub async fn auto_complete_user_episodes(&self, user_id: i32, auto_complete_seconds: i32) -> AppResult { - if auto_complete_seconds <= 0 { - return Ok(0); - } - - let mut completed_count = 0; - - match self { + + // Get user start page - matches Python get_user_startpage function exactly + pub async fn get_user_startpage(&self, user_id: i32) -> AppResult { + println!("Getting start page for user {}", 
user_id); + + let startpage = match self { DatabasePool::Postgres(pool) => { - // Handle regular episodes - let episode_rows = sqlx::query(r#" - SELECT e.episodeid, e.episodeduration, COALESCE(h.listenduration, 0) as listenduration - FROM "Episodes" e - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 - WHERE e.completed = false - AND e.episodeduration > 0 - AND h.listenduration > 0 - AND (e.episodeduration - h.listenduration) <= $2 - "#) - .bind(user_id) - .bind(auto_complete_seconds) - .fetch_all(pool) - .await?; - - for row in episode_rows { - let episode_id: i32 = row.try_get("episodeid")?; - let _ = self.mark_episode_completed(episode_id, user_id, false).await; - completed_count += 1; - } - - // Handle YouTube episodes - let youtube_rows = sqlx::query(r#" - SELECT v.videoid, v.duration, COALESCE(h.listenduration, 0) as listenduration - FROM "YouTubeVideos" v - LEFT JOIN "UserVideoHistory" h ON v.videoid = h.videoid AND h.userid = $1 - WHERE v.completed = false - AND v.duration > 0 - AND h.listenduration > 0 - AND (v.duration - h.listenduration) <= $2 - "#) - .bind(user_id) - .bind(auto_complete_seconds) - .fetch_all(pool) - .await?; - - for row in youtube_rows { - let video_id: i32 = row.try_get("videoid")?; - let _ = self.mark_episode_completed(video_id, user_id, true).await; - completed_count += 1; + let row = sqlx::query(r#"SELECT startpage FROM "UserSettings" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::, _>("startpage")?.unwrap_or_else(|| "home".to_string()) + } else { + "home".to_string() } } DatabasePool::MySQL(pool) => { - // Handle regular episodes - let episode_rows = sqlx::query(" - SELECT e.EpisodeID, e.EpisodeDuration, COALESCE(h.ListenDuration, 0) as ListenDuration - FROM Episodes e - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? 
- WHERE e.Completed = 0 - AND e.EpisodeDuration > 0 - AND h.ListenDuration > 0 - AND (e.EpisodeDuration - h.ListenDuration) <= ? - ") - .bind(user_id) - .bind(auto_complete_seconds) - .fetch_all(pool) - .await?; - - for row in episode_rows { - let episode_id: i32 = row.try_get("EpisodeID")?; - let _ = self.mark_episode_completed(episode_id, user_id, false).await; - completed_count += 1; - } - - // Handle YouTube episodes - let youtube_rows = sqlx::query(" - SELECT v.VideoID, v.Duration, COALESCE(h.ListenDuration, 0) as ListenDuration - FROM YouTubeVideos v - LEFT JOIN UserVideoHistory h ON v.VideoID = h.VideoID AND h.UserID = ? - WHERE v.Completed = 0 - AND v.Duration > 0 - AND h.ListenDuration > 0 - AND (v.Duration - h.ListenDuration) <= ? - ") - .bind(user_id) - .bind(auto_complete_seconds) - .fetch_all(pool) - .await?; - - for row in youtube_rows { - let video_id: i32 = row.try_get("VideoID")?; - let _ = self.mark_episode_completed(video_id, user_id, true).await; - completed_count += 1; + let row = sqlx::query("SELECT StartPage FROM UserSettings WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::, _>("StartPage")?.unwrap_or_else(|| "home".to_string()) + } else { + "home".to_string() } } - } - - Ok(completed_count) + }; + + Ok(startpage) } - // Subscribe to person - matches Python subscribe_to_person function exactly - pub async fn subscribe_to_person(&self, user_id: i32, person_id: i32, person_name: &str, person_img: &str, podcast_id: i32) -> AppResult { - println!("Subscribing user {} to person {}: {}", user_id, person_id, person_name); + // Set user start page - matches Python set_user_startpage function exactly + pub async fn set_user_startpage(&self, user_id: i32, startpage: &str) -> AppResult { + println!("Setting start page for user {} to {}", user_id, startpage); - // Check if person already exists for this user and handle accordingly - let result = match self { + // Check if user settings 
exist and perform update/insert + let success = match self { DatabasePool::Postgres(pool) => { - let existing = sqlx::query(r#"SELECT personid FROM "People" WHERE userid = $1 AND peopledbid = $2"#) + let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserSettings" WHERE userid = $1"#) .bind(user_id) - .bind(person_id) - .fetch_optional(pool) + .fetch_one(pool) .await?; - if let Some(row) = existing { - let person_db_id: i32 = row.try_get("personid")?; - - // Update associated podcasts to include this podcast_id - let podcast_row = sqlx::query(r#"SELECT associatedpodcasts FROM "People" WHERE personid = $1"#) - .bind(person_db_id) - .fetch_one(pool) - .await?; - let current_podcasts = podcast_row.try_get::, _>("associatedpodcasts")?; - - let updated_podcasts = if let Some(podcasts) = current_podcasts { - if !podcasts.contains(&podcast_id.to_string()) { - format!("{},{}", podcasts, podcast_id) - } else { - podcasts - } - } else { - podcast_id.to_string() - }; - - // Update the record - sqlx::query(r#"UPDATE "People" SET associatedpodcasts = $2 WHERE personid = $1"#) - .bind(person_db_id) - .bind(&updated_podcasts) + let count: i64 = existing.try_get("count")?; + + if count > 0 { + // Update existing record + let result = sqlx::query(r#"UPDATE "UserSettings" SET startpage = $2 WHERE userid = $1"#) + .bind(user_id) + .bind(startpage) .execute(pool) .await?; - - println!("Updated existing person subscription with ID: {}", person_db_id); - person_db_id + result.rows_affected() > 0 } else { - // Insert new person subscription - let row = sqlx::query(r#" - INSERT INTO "People" (userid, name, personimg, peopledbid, associatedpodcasts) - VALUES ($1, $2, $3, $4, $5) - RETURNING personid - "#) + // Insert new record + let result = sqlx::query(r#"INSERT INTO "UserSettings" (userid, startpage) VALUES ($1, $2)"#) .bind(user_id) - .bind(person_name) - .bind(person_img) - .bind(person_id) - .bind(podcast_id.to_string()) - .fetch_one(pool) + .bind(startpage) + 
.execute(pool) .await?; - - let person_db_id: i32 = row.try_get("personid")?; - println!("Successfully subscribed to person with ID: {}", person_db_id); - person_db_id + result.rows_affected() > 0 } } DatabasePool::MySQL(pool) => { - let existing = sqlx::query("SELECT PersonID FROM People WHERE UserID = ? AND PeopleDBID = ?") + let existing = sqlx::query("SELECT COUNT(*) as count FROM UserSettings WHERE UserID = ?") .bind(user_id) - .bind(person_id) - .fetch_optional(pool) + .fetch_one(pool) .await?; - if let Some(row) = existing { - let person_db_id: i32 = row.try_get("PersonID")?; - - // Update associated podcasts to include this podcast_id - let podcast_row = sqlx::query("SELECT AssociatedPodcasts FROM People WHERE PersonID = ?") - .bind(person_db_id) - .fetch_one(pool) + let count: i64 = existing.try_get("count")?; + + if count > 0 { + // Update existing record + let result = sqlx::query("UPDATE UserSettings SET StartPage = ? WHERE UserID = ?") + .bind(startpage) + .bind(user_id) + .execute(pool) .await?; - let current_podcasts = podcast_row.try_get::, _>("AssociatedPodcasts")?; - - let updated_podcasts = if let Some(podcasts) = current_podcasts { - if !podcasts.contains(&podcast_id.to_string()) { - format!("{},{}", podcasts, podcast_id) - } else { - podcasts - } - } else { - podcast_id.to_string() - }; - - // Update the record - sqlx::query("UPDATE People SET AssociatedPodcasts = ? WHERE PersonID = ?") - .bind(&updated_podcasts) - .bind(person_db_id) - .execute(pool) - .await?; - - println!("Updated existing person subscription with ID: {}", person_db_id); - person_db_id + result.rows_affected() > 0 } else { - // Insert new person subscription - let result = sqlx::query(" - INSERT INTO People (UserID, Name, PersonImg, PeopleDBID, AssociatedPodcasts) - VALUES (?, ?, ?, ?, ?) 
- ") + // Insert new record + let result = sqlx::query("INSERT INTO UserSettings (UserID, StartPage) VALUES (?, ?)") .bind(user_id) - .bind(person_name) - .bind(person_img) - .bind(person_id) - .bind(podcast_id.to_string()) + .bind(startpage) .execute(pool) .await?; - - let person_db_id = result.last_insert_id() as i32; - println!("Successfully subscribed to person with ID: {}", person_db_id); - person_db_id + result.rows_affected() > 0 } } }; - Ok(result) + println!("Successfully set start page for user {} to {}: {}", user_id, startpage, success); + Ok(success) } - - // Unsubscribe from person - matches Python unsubscribe_from_person function exactly - pub async fn unsubscribe_from_person(&self, user_id: i32, person_id: i32, person_name: &str) -> AppResult { - println!("Unsubscribing user {} from person {}: {}", user_id, person_id, person_name); + + // Update startpage wrapper function for compatibility + pub async fn update_startpage(&self, user_id: i32, startpage: &str) -> AppResult { + self.set_user_startpage(user_id, startpage).await + } + + // Get startpage wrapper function for compatibility + pub async fn get_startpage(&self, user_id: i32) -> AppResult { + self.get_user_startpage(user_id).await + } + + // Get user auto complete seconds setting + pub async fn get_user_auto_complete_seconds(&self, user_id: i32) -> AppResult { + println!("Getting auto complete seconds for user {}", user_id); - // Find and delete the person record - let rows_affected = match self { + let auto_complete_seconds = match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"DELETE FROM "People" WHERE userid = $1 AND peopledbid = $2"#) + let row = sqlx::query(r#"SELECT autocompleteseconds FROM "UserSettings" WHERE userid = $1"#) .bind(user_id) - .bind(person_id) - .execute(pool) - .await? 
- .rows_affected() + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::, _>("autocompleteseconds")?.unwrap_or(0) + } else { + 0 + } } DatabasePool::MySQL(pool) => { - sqlx::query("DELETE FROM People WHERE UserID = ? AND PeopleDBID = ?") + let row = sqlx::query("SELECT AutoCompleteSeconds FROM UserSettings WHERE UserID = ?") .bind(user_id) - .bind(person_id) - .execute(pool) - .await? - .rows_affected() + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::, _>("AutoCompleteSeconds")?.unwrap_or(0) + } else { + 0 + } } }; - if rows_affected > 0 { - // Check if this was the last subscriber to this person - let count: i64 = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT COUNT(*) as count FROM "People" WHERE peopledbid = $1"#) - .bind(person_id) - .fetch_one(pool) + Ok(auto_complete_seconds) + } + + // Set user auto complete seconds setting + pub async fn set_user_auto_complete_seconds(&self, user_id: i32, seconds: i32) -> AppResult { + println!("Setting auto complete seconds for user {} to {}", user_id, seconds); + + // Check if user settings exist and perform update/insert + let success = match self { + DatabasePool::Postgres(pool) => { + let existing = sqlx::query(r#"SELECT COUNT(*) as count FROM "UserSettings" WHERE userid = $1"#) + .bind(user_id) + .fetch_one(pool) + .await?; + + let count: i64 = existing.try_get("count")?; + + if count > 0 { + // Update existing record + let result = sqlx::query(r#"UPDATE "UserSettings" SET autocompleteseconds = $2 WHERE userid = $1"#) + .bind(user_id) + .bind(seconds) + .execute(pool) .await?; - row.try_get("count")? 
- } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT COUNT(*) as count FROM People WHERE PeopleDBID = ?") - .bind(person_id) - .fetch_one(pool) + result.rows_affected() > 0 + } else { + // Insert new record with default theme + let result = sqlx::query(r#"INSERT INTO "UserSettings" (userid, autocompleteseconds, theme) VALUES ($1, $2, 'Nordic')"#) + .bind(user_id) + .bind(seconds) + .execute(pool) .await?; - row.try_get("count")? + result.rows_affected() > 0 } - }; - - // If no more subscribers, clean up episodes - if count == 0 { - match self { - DatabasePool::Postgres(pool) => { - let _ = sqlx::query(r#"DELETE FROM "PeopleEpisodes" WHERE personid = $1"#) - .bind(person_id) - .execute(pool) - .await; - } - DatabasePool::MySQL(pool) => { - let _ = sqlx::query("DELETE FROM PeopleEpisodes WHERE PersonID = ?") - .bind(person_id) - .execute(pool) - .await; - } + } + DatabasePool::MySQL(pool) => { + let existing = sqlx::query("SELECT COUNT(*) as count FROM UserSettings WHERE UserID = ?") + .bind(user_id) + .fetch_one(pool) + .await?; + + let count: i64 = existing.try_get("count")?; + + if count > 0 { + // Update existing record + let result = sqlx::query("UPDATE UserSettings SET AutoCompleteSeconds = ? 
WHERE UserID = ?") + .bind(seconds) + .bind(user_id) + .execute(pool) + .await?; + result.rows_affected() > 0 + } else { + // Insert new record with default theme + let result = sqlx::query("INSERT INTO UserSettings (UserID, AutoCompleteSeconds, Theme) VALUES (?, ?, 'Nordic')") + .bind(user_id) + .bind(seconds) + .execute(pool) + .await?; + result.rows_affected() > 0 } } - - println!("Successfully unsubscribed from person {}", person_id); - Ok(true) - } else { - println!("Person subscription not found for user {} and person {}", user_id, person_id); - Ok(false) - } - } - - // Get person subscriptions - matches Python get_person_subscriptions function exactly - pub async fn get_person_subscriptions(&self, user_id: i32) -> AppResult> { - println!("Getting person subscriptions for user {}", user_id); - - let mut subscriptions = Vec::new(); + }; + println!("Successfully set auto complete seconds for user {} to {}: {}", user_id, seconds, success); + Ok(success) + } + + // Get episode duration in seconds + pub async fn get_episode_duration(&self, episode_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT personid, userid, name, personimg, peopledbid, associatedpodcasts - FROM "People" - WHERE userid = $1 - ORDER BY name - "#) - .bind(user_id) - .fetch_all(pool) + let row = sqlx::query(r#"SELECT episodeduration FROM "Episodes" WHERE episodeid = $1"#) + .bind(episode_id) + .fetch_optional(pool) .await?; - for row in rows { - let subscription = serde_json::json!({ - "personid": row.try_get::("personid")?, - "userid": row.try_get::("userid")?, - "name": row.try_get::("name")?, - "image": row.try_get::("personimg")?, - "peopledbid": row.try_get::("peopledbid")?, - "associatedpodcasts": row.try_get::, _>("associatedpodcasts")? 
- }); - subscriptions.push(subscription); + if let Some(row) = row { + Ok(row.try_get::, _>("episodeduration")?.unwrap_or(0)) + } else { + Ok(0) } } DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT PersonID, UserID, Name, PersonImg, PeopleDBID, AssociatedPodcasts - FROM People - WHERE UserID = ? - ORDER BY Name - ") - .bind(user_id) - .fetch_all(pool) + let row = sqlx::query("SELECT EpisodeDuration FROM Episodes WHERE EpisodeID = ?") + .bind(episode_id) + .fetch_optional(pool) .await?; - for row in rows { - let subscription = serde_json::json!({ - "personid": row.try_get::("PersonID")?, - "userid": row.try_get::("UserID")?, - "name": row.try_get::("Name")?, - "image": row.try_get::("PersonImg")?, - "peopledbid": row.try_get::("PeopleDBID")?, - "associatedpodcasts": row.try_get::, _>("AssociatedPodcasts")? - }); - subscriptions.push(subscription); + if let Some(row) = row { + Ok(row.try_get::, _>("EpisodeDuration")?.unwrap_or(0)) + } else { + Ok(0) } } } - - println!("Found {} person subscriptions for user {}", subscriptions.len(), user_id); - Ok(subscriptions) } - - // Get person episodes - matches Python return_person_episodes function exactly - pub async fn get_person_episodes(&self, user_id: i32, person_id: i32) -> AppResult> { - println!("Getting episodes for user {} and person {}", user_id, person_id); - - let mut episodes = Vec::new(); - + + // Get YouTube episode duration in seconds + pub async fn get_youtube_episode_duration(&self, episode_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT - e.episodeid, -- Will be NULL if no match in Episodes table - pe.episodetitle, - pe.episodedescription, - pe.episodeurl, - CASE - WHEN pe.episodeartwork IS NULL THEN - (SELECT artworkurl FROM "Podcasts" WHERE podcastid = pe.podcastid) - ELSE pe.episodeartwork - END as episodeartwork, - pe.episodepubdate, - pe.episodeduration, - p.podcastname, - CASE - WHEN ( - SELECT 1 FROM "Podcasts" - WHERE 
podcastid = pe.podcastid - AND userid = $1 - ) IS NOT NULL THEN - CASE - WHEN s.episodeid IS NOT NULL THEN TRUE - ELSE FALSE - END - ELSE FALSE - END AS saved, - CASE - WHEN ( - SELECT 1 FROM "Podcasts" - WHERE podcastid = pe.podcastid - AND userid = $1 - ) IS NOT NULL THEN - CASE - WHEN d.episodeid IS NOT NULL THEN TRUE - ELSE FALSE - END - ELSE FALSE - END AS downloaded, - CASE - WHEN ( - SELECT 1 FROM "Podcasts" - WHERE podcastid = pe.podcastid - AND userid = $1 - ) IS NOT NULL THEN - COALESCE(h.listenduration, 0) - ELSE 0 - END AS listenduration, - FALSE as is_youtube - FROM "PeopleEpisodes" pe - INNER JOIN "People" pp ON pe.personid = pp.personid - INNER JOIN "Podcasts" p ON pe.podcastid = p.podcastid - LEFT JOIN "Episodes" e ON e.episodeurl = pe.episodeurl AND e.podcastid = pe.podcastid - LEFT JOIN ( - SELECT * FROM "SavedEpisodes" WHERE userid = $2 - ) s ON s.episodeid = e.episodeid - LEFT JOIN ( - SELECT * FROM "DownloadedEpisodes" WHERE userid = $3 - ) d ON d.episodeid = e.episodeid - LEFT JOIN ( - SELECT * FROM "UserEpisodeHistory" WHERE userid = $4 - ) h ON h.episodeid = e.episodeid - WHERE pe.personid = $5 - AND pe.episodepubdate >= NOW() - INTERVAL '30 days' - ORDER BY pe.episodepubdate DESC - "#) - .bind(user_id) // $1 - .bind(user_id) // $2 - .bind(user_id) // $3 - .bind(user_id) // $4 - .bind(person_id) // $5 - .fetch_all(pool) - .await?; - - for row in rows { - let episodeid = row.try_get::, _>("episodeid")?; - let episodetitle = row.try_get::("episodetitle")?; - let episodedescription = row.try_get::("episodedescription")?; - let episodeurl = row.try_get::("episodeurl")?; - let episodeartwork = row.try_get::, _>("episodeartwork")?; - let dt = row.try_get::("episodepubdate")?; - let episodepubdate = dt.format("%Y-%m-%dT%H:%M:%S").to_string(); - let episodeduration = row.try_get::("episodeduration")?; - let podcastname = row.try_get::("podcastname")?; - let saved = row.try_get::("saved")?; - let downloaded = row.try_get::("downloaded")?; - let 
listenduration = row.try_get::("listenduration")?; - let is_youtube = row.try_get::("is_youtube")?; - - let episode = serde_json::json!({ - "episodeid": episodeid.unwrap_or(-1), - "episodetitle": episodetitle, - "episodedescription": episodedescription, - "episodeurl": episodeurl, - "episodeartwork": episodeartwork, - "episodepubdate": episodepubdate, - "episodeduration": episodeduration, - "podcastname": podcastname, - "saved": saved, - "downloaded": downloaded, - "listenduration": listenduration, - "is_youtube": is_youtube - }); - episodes.push(episode); - } - } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT - e.EpisodeID, -- Will be NULL if no match in Episodes table - pe.EpisodeTitle, - pe.EpisodeDescription, - pe.EpisodeURL, - COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as EpisodeArtwork, - pe.EpisodePubDate, - pe.EpisodeDuration, - p.PodcastName, - IF( - EXISTS( - SELECT 1 FROM Podcasts - WHERE PodcastID = pe.PodcastID - AND UserID = ? - ), - IF(s.EpisodeID IS NOT NULL, TRUE, FALSE), - FALSE - ) AS Saved, - IF( - EXISTS( - SELECT 1 FROM Podcasts - WHERE PodcastID = pe.PodcastID - AND UserID = ? - ), - IF(d.EpisodeID IS NOT NULL, TRUE, FALSE), - FALSE - ) AS Downloaded, - IF( - EXISTS( - SELECT 1 FROM Podcasts - WHERE PodcastID = pe.PodcastID - AND UserID = ? - ), - COALESCE(h.ListenDuration, 0), - 0 - ) AS ListenDuration, - FALSE as is_youtube - FROM PeopleEpisodes pe - INNER JOIN People pp ON pe.PersonID = pp.PersonID - INNER JOIN Podcasts p ON pe.PodcastID = p.PodcastID - LEFT JOIN Episodes e ON e.EpisodeURL = pe.EpisodeURL AND e.PodcastID = pe.PodcastID - LEFT JOIN ( - SELECT * FROM SavedEpisodes WHERE UserID = ? - ) s ON s.EpisodeID = e.EpisodeID - LEFT JOIN ( - SELECT * FROM DownloadedEpisodes WHERE UserID = ? - ) d ON d.EpisodeID = e.EpisodeID - LEFT JOIN ( - SELECT * FROM UserEpisodeHistory WHERE UserID = ? - ) h ON h.EpisodeID = e.EpisodeID - WHERE pe.PersonID = ? 
- AND pe.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 30 DAY) - ORDER BY pe.EpisodePubDate DESC - ") - .bind(user_id) // 1st ? - .bind(user_id) // 2nd ? - .bind(user_id) // 3rd ? - .bind(user_id) // 4th ? - .bind(user_id) // 5th ? - .bind(user_id) // 6th ? - .bind(person_id) // 7th ? - .fetch_all(pool) - .await?; - - for row in rows { - let episodeid = row.try_get::, _>("EpisodeID")?; - let episodetitle = row.try_get::("EpisodeTitle")?; - let episodedescription = row.try_get::("EpisodeDescription")?; - let episodeurl = row.try_get::("EpisodeURL")?; - let episodeartwork = row.try_get::, _>("EpisodeArtwork")?; - let dt = row.try_get::("EpisodePubDate")?; - let episodepubdate = dt.format("%Y-%m-%dT%H:%M:%S").to_string(); - let episodeduration = row.try_get::("EpisodeDuration")?; - let podcastname = row.try_get::("PodcastName")?; - let saved = row.try_get::("Saved")?; - let downloaded = row.try_get::("Downloaded")?; - let listenduration = row.try_get::("ListenDuration")?; - let is_youtube = row.try_get::("is_youtube")?; - - let episode = serde_json::json!({ - "episodeid": episodeid.unwrap_or(-1), - "episodetitle": episodetitle, - "episodedescription": episodedescription, - "episodeurl": episodeurl, - "episodeartwork": episodeartwork, - "episodepubdate": episodepubdate, - "episodeduration": episodeduration, - "podcastname": podcastname, - "saved": saved, - "downloaded": downloaded, - "listenduration": listenduration, - "is_youtube": is_youtube - }); - episodes.push(episode); - } - } - } - - println!("Found {} episodes for user {} and person {}", episodes.len(), user_id, person_id); - Ok(episodes) - } - - // Check existing YouTube channel subscription - matches Python check_existing_channel_subscription function exactly - pub async fn check_existing_channel_subscription(&self, channel_id: &str, user_id: i32) -> AppResult> { - println!("Checking existing channel subscription for {} and user {}", channel_id, user_id); - - match self { - DatabasePool::Postgres(pool) => { - 
let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) - .bind(format!("https://www.youtube.com/channel/{}", channel_id)) - .bind(user_id) + let row = sqlx::query(r#"SELECT duration FROM "YouTubeVideos" WHERE videoid = $1"#) + .bind(episode_id) .fetch_optional(pool) .await?; if let Some(row) = row { - let podcast_id: i32 = row.try_get("podcastid")?; - println!("Found existing subscription with ID: {}", podcast_id); - Ok(Some(podcast_id)) + Ok(row.try_get::, _>("duration")?.unwrap_or(0)) } else { - println!("No existing subscription found"); - Ok(None) + Ok(0) } } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?") - .bind(format!("https://www.youtube.com/channel/{}", channel_id)) - .bind(user_id) + let row = sqlx::query("SELECT Duration FROM YouTubeVideos WHERE VideoID = ?") + .bind(episode_id) .fetch_optional(pool) .await?; if let Some(row) = row { - let podcast_id: i32 = row.try_get("podcastid")?; - println!("Found existing subscription with ID: {}", podcast_id); - Ok(Some(podcast_id)) + Ok(row.try_get::, _>("Duration")?.unwrap_or(0)) } else { - println!("No existing subscription found"); - Ok(None) + Ok(0) } } } } - - // Add YouTube channel - matches Python add_youtube_channel function exactly - pub async fn add_youtube_channel(&self, channel_info: &std::collections::HashMap, user_id: i32, feed_cutoff: i32) -> AppResult { - println!("Adding YouTube channel to database for user {}", user_id); - - let channel_id = channel_info.get("channel_id").ok_or_else(|| AppError::bad_request("Channel ID is required"))?; - let empty_string = String::new(); - let name = channel_info.get("name").unwrap_or(&empty_string); - let description = channel_info.get("description").unwrap_or(&empty_string); - let thumbnail_url = channel_info.get("thumbnail_url").unwrap_or(&empty_string); - let feed_url = format!("https://www.youtube.com/channel/{}", channel_id); - - // Insert new 
YouTube channel as podcast - let podcast_id = match self { + + // Get users who have auto-complete enabled (auto_complete_seconds > 0) + pub async fn get_users_with_auto_complete_enabled(&self) -> AppResult> { + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - INSERT INTO "Podcasts" ( - userid, podcastname, artworkurl, description, episodecount, - websiteurl, feedurl, author, categories, explicit, podcastindexid, feedcutoffdays, isyoutubechannel - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) - RETURNING podcastid - "#) - .bind(user_id) - .bind(name) - .bind(thumbnail_url) - .bind(description) - .bind(0) // Initial episode count - .bind(&feed_url) - .bind(&feed_url) - .bind(name) // Use channel name as author - .bind("{}") // Empty categories for YouTube - .bind(false) // Not explicit by default - .bind(0) // No podcast index ID for YouTube - .bind(feed_cutoff) - .bind(true) // Is YouTube channel - .fetch_one(pool) - .await?; - - row.try_get("podcastid")? - } - DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#" - INSERT INTO Podcasts ( - UserID, PodcastName, ArtworkURL, Description, EpisodeCount, - WebsiteURL, FeedURL, Author, Categories, Explicit, PodcastIndexID, FeedCutoffDays, IsYouTubeChannel - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- "#) - .bind(user_id) - .bind(name) - .bind(thumbnail_url) - .bind(description) - .bind(0) // Initial episode count - .bind(&feed_url) - .bind(&feed_url) - .bind(name) // Use channel name as author - .bind("{}") // Empty categories for YouTube - .bind(false) // Not explicit by default - .bind(0) // No podcast index ID for YouTube - .bind(feed_cutoff) - .bind(true) // Is YouTube channel - .execute(pool) - .await?; - - result.last_insert_id() as i32 - } - }; - - // Update UserStats PodcastsAdded counter - match self { - DatabasePool::Postgres(pool) => { - let _ = sqlx::query(r#" - UPDATE "UserStats" - SET podcastsadded = podcastsadded + 1 - WHERE userid = $1 + let rows = sqlx::query(r#" + SELECT us.userid, us.autocompleteseconds + FROM "UserSettings" us + WHERE us.autocompleteseconds > 0 "#) - .bind(user_id) - .execute(pool) - .await; + .fetch_all(pool) + .await?; + + let mut users = Vec::new(); + for row in rows { + users.push(UserAutoComplete { + user_id: row.try_get("userid")?, + auto_complete_seconds: row.try_get("autocompleteseconds")?, + }); + } + Ok(users) } DatabasePool::MySQL(pool) => { - let _ = sqlx::query("UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = ?") - .bind(user_id) - .execute(pool) - .await; + let rows = sqlx::query(" + SELECT UserID, AutoCompleteSeconds + FROM UserSettings + WHERE AutoCompleteSeconds > 0 + ") + .fetch_all(pool) + .await?; + + let mut users = Vec::new(); + for row in rows { + users.push(UserAutoComplete { + user_id: row.try_get("UserID")?, + auto_complete_seconds: row.try_get("AutoCompleteSeconds")?, + }); + } + Ok(users) } } - - println!("Successfully added YouTube channel with ID: {}", podcast_id); - Ok(podcast_id) } - // Check if YouTube channel already exists - matches Python check_youtube_channel function exactly - pub async fn check_youtube_channel(&self, user_id: i32, channel_name: &str, channel_url: &str) -> AppResult { + // Auto-complete episodes for a user based on their setting + pub async fn 
auto_complete_user_episodes(&self, user_id: i32, auto_complete_seconds: i32) -> AppResult { + if auto_complete_seconds <= 0 { + return Ok(0); + } + + let mut completed_count = 0; + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT podcastid FROM "Podcasts" - WHERE userid = $1 AND podcastname = $2 AND feedurl = $3 AND isyoutubechannel = TRUE + // Handle regular episodes + let episode_rows = sqlx::query(r#" + SELECT e.episodeid, e.episodeduration, COALESCE(h.listenduration, 0) as listenduration + FROM "Episodes" e + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 + WHERE e.completed = false + AND e.episodeduration > 0 + AND h.listenduration > 0 + AND (e.episodeduration - h.listenduration) <= $2 "#) - .bind(user_id) - .bind(channel_name) - .bind(channel_url) - .fetch_optional(pool) - .await?; - - Ok(row.is_some()) - } - DatabasePool::MySQL(pool) => { - let row = sqlx::query(r#" - SELECT PodcastID FROM Podcasts - WHERE UserID = ? AND PodcastName = ? AND FeedURL = ? 
AND IsYouTubeChannel = TRUE + .bind(user_id) + .bind(auto_complete_seconds) + .fetch_all(pool) + .await?; + + for row in episode_rows { + let episode_id: i32 = row.try_get("episodeid")?; + let _ = self.mark_episode_completed(episode_id, user_id, false).await; + completed_count += 1; + } + + // Handle YouTube episodes + let youtube_rows = sqlx::query(r#" + SELECT v.videoid, v.duration, COALESCE(h.listenduration, 0) as listenduration + FROM "YouTubeVideos" v + LEFT JOIN "UserVideoHistory" h ON v.videoid = h.videoid AND h.userid = $1 + WHERE v.completed = false + AND v.duration > 0 + AND h.listenduration > 0 + AND (v.duration - h.listenduration) <= $2 "#) - .bind(user_id) - .bind(channel_name) - .bind(channel_url) - .fetch_optional(pool) - .await?; - - Ok(row.is_some()) - } - } - } - - // Remove old YouTube videos - matches Python remove_old_youtube_videos function exactly - pub async fn remove_old_youtube_videos(&self, podcast_id: i32, cutoff_date: chrono::DateTime) -> AppResult<()> { - println!("Removing old YouTube videos for podcast {} before {}", podcast_id, cutoff_date); - - let cutoff_naive = cutoff_date.naive_utc(); - - let rows_affected = match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#"DELETE FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2"#) - .bind(podcast_id) - .bind(cutoff_naive) - .execute(pool) - .await? - .rows_affected() + .bind(user_id) + .bind(auto_complete_seconds) + .fetch_all(pool) + .await?; + + for row in youtube_rows { + let video_id: i32 = row.try_get("videoid")?; + let _ = self.mark_episode_completed(video_id, user_id, true).await; + completed_count += 1; + } } DatabasePool::MySQL(pool) => { - sqlx::query("DELETE FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?") - .bind(podcast_id) - .bind(cutoff_naive) - .execute(pool) - .await? 
- .rows_affected() + // Handle regular episodes + let episode_rows = sqlx::query(" + SELECT e.EpisodeID, e.EpisodeDuration, COALESCE(h.ListenDuration, 0) as ListenDuration + FROM Episodes e + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + WHERE e.Completed = 0 + AND e.EpisodeDuration > 0 + AND h.ListenDuration > 0 + AND (e.EpisodeDuration - h.ListenDuration) <= ? + ") + .bind(user_id) + .bind(auto_complete_seconds) + .fetch_all(pool) + .await?; + + for row in episode_rows { + let episode_id: i32 = row.try_get("EpisodeID")?; + let _ = self.mark_episode_completed(episode_id, user_id, false).await; + completed_count += 1; + } + + // Handle YouTube episodes + let youtube_rows = sqlx::query(" + SELECT v.VideoID, v.Duration, COALESCE(h.ListenDuration, 0) as ListenDuration + FROM YouTubeVideos v + LEFT JOIN UserVideoHistory h ON v.VideoID = h.VideoID AND h.UserID = ? + WHERE v.Completed = 0 + AND v.Duration > 0 + AND h.ListenDuration > 0 + AND (v.Duration - h.ListenDuration) <= ? 
+ ") + .bind(user_id) + .bind(auto_complete_seconds) + .fetch_all(pool) + .await?; + + for row in youtube_rows { + let video_id: i32 = row.try_get("VideoID")?; + let _ = self.mark_episode_completed(video_id, user_id, true).await; + completed_count += 1; + } } - }; - - println!("Removed {} old YouTube videos", rows_affected); - Ok(()) + } + + Ok(completed_count) } - // Get existing YouTube videos - matches Python get_existing_youtube_videos function exactly - pub async fn get_existing_youtube_videos(&self, podcast_id: i32) -> AppResult> { - println!("Getting existing YouTube videos for podcast {}", podcast_id); - - let mut video_urls = Vec::new(); - - match self { + // Subscribe to person - matches Python subscribe_to_person function exactly + pub async fn subscribe_to_person(&self, user_id: i32, person_id: i32, person_name: &str, person_img: &str, podcast_id: i32) -> AppResult { + println!("Subscribing user {} to person {}: {}", user_id, person_id, person_name); + + // Check if person already exists for this user and handle accordingly + let result = match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT videourl FROM "YouTubeVideos" WHERE podcastid = $1"#) - .bind(podcast_id) - .fetch_all(pool) - .await?; + // When peopledbid is 0 or not set, use name for lookup to avoid collisions + let existing = if person_id == 0 { + sqlx::query(r#"SELECT personid FROM "People" WHERE userid = $1 AND LOWER(name) = LOWER($2)"#) + .bind(user_id) + .bind(person_name) + .fetch_optional(pool) + .await? + } else { + sqlx::query(r#"SELECT personid FROM "People" WHERE userid = $1 AND peopledbid = $2"#) + .bind(user_id) + .bind(person_id) + .fetch_optional(pool) + .await? 
+ }; - for row in rows { - let url: String = row.try_get("videourl")?; - video_urls.push(url); + if let Some(row) = existing { + let person_db_id: i32 = row.try_get("personid")?; + + // Update associated podcasts to include this podcast_id + let podcast_row = sqlx::query(r#"SELECT associatedpodcasts FROM "People" WHERE personid = $1"#) + .bind(person_db_id) + .fetch_one(pool) + .await?; + let current_podcasts = podcast_row.try_get::, _>("associatedpodcasts")?; + + let updated_podcasts = if let Some(podcasts) = current_podcasts { + if !podcasts.contains(&podcast_id.to_string()) { + format!("{},{}", podcasts, podcast_id) + } else { + podcasts + } + } else { + podcast_id.to_string() + }; + + // Update the record + sqlx::query(r#"UPDATE "People" SET associatedpodcasts = $2 WHERE personid = $1"#) + .bind(person_db_id) + .bind(&updated_podcasts) + .execute(pool) + .await?; + + println!("Updated existing person subscription with ID: {}", person_db_id); + person_db_id + } else { + // Insert new person subscription + let row = sqlx::query(r#" + INSERT INTO "People" (userid, name, personimg, peopledbid, associatedpodcasts) + VALUES ($1, $2, $3, $4, $5) + RETURNING personid + "#) + .bind(user_id) + .bind(person_name) + .bind(person_img) + .bind(person_id) + .bind(podcast_id.to_string()) + .fetch_one(pool) + .await?; + + let person_db_id: i32 = row.try_get("personid")?; + println!("Successfully subscribed to person with ID: {}", person_db_id); + person_db_id } } DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT VideoURL FROM YouTubeVideos WHERE PodcastID = ?") - .bind(podcast_id) - .fetch_all(pool) - .await?; + // When peopledbid is 0 or not set, use name for lookup to avoid collisions + let existing = if person_id == 0 { + sqlx::query("SELECT PersonID FROM People WHERE UserID = ? AND LOWER(Name) = LOWER(?)") + .bind(user_id) + .bind(person_name) + .fetch_optional(pool) + .await? + } else { + sqlx::query("SELECT PersonID FROM People WHERE UserID = ? 
AND PeopleDBID = ?") + .bind(user_id) + .bind(person_id) + .fetch_optional(pool) + .await? + }; - for row in rows { - let url: String = row.try_get("VideoURL")?; - video_urls.push(url); - } - } - } - - println!("Found {} existing videos", video_urls.len()); - Ok(video_urls) - } - - // Add YouTube videos - matches Python add_youtube_videos function exactly - pub async fn add_youtube_videos(&self, podcast_id: i32, videos: &[serde_json::Value]) -> AppResult<()> { - println!("Adding {} YouTube videos for podcast {}", videos.len(), podcast_id); - - for video in videos { - let video_id = video.get("id").and_then(|v| v.as_str()).unwrap_or(""); - let title = video.get("title").and_then(|v| v.as_str()).unwrap_or(""); - let description = video.get("description").and_then(|v| v.as_str()).unwrap_or(""); - let url = video.get("url").and_then(|v| v.as_str()).unwrap_or(""); - let thumbnail = video.get("thumbnail").and_then(|v| v.as_str()).unwrap_or(""); - - println!("Processing video {} for database insertion", video_id); - println!("Video data: {:?}", video); - - let duration = if let Some(duration_str) = video.get("duration").and_then(|v| v.as_str()) { - println!("Duration as string: '{}'", duration_str); - let parsed = crate::handlers::youtube::parse_youtube_duration(duration_str).unwrap_or(0) as i32; - println!("Parsed duration: {}", parsed); - parsed - } else { - let int_duration = video.get("duration").and_then(|v| v.as_i64()).unwrap_or(0) as i32; - println!("Duration as integer: {}", int_duration); - int_duration - }; - - // Parse publish date - let publish_date = if let Some(date_str) = video.get("publish_date").and_then(|v| v.as_str()) { - chrono::DateTime::parse_from_rfc3339(date_str) - .map(|dt| dt.naive_utc()) - .unwrap_or_else(|_| chrono::Utc::now().naive_utc()) - } else { - chrono::Utc::now().naive_utc() - }; + if let Some(row) = existing { + let person_db_id: i32 = row.try_get("PersonID")?; - match self { - DatabasePool::Postgres(pool) => { - let _ = 
sqlx::query(r#" - INSERT INTO "YouTubeVideos" ( - podcastid, youtubevideoid, videotitle, videodescription, videourl, - thumbnailurl, publishedat, duration, completed, listenposition - ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) - "#) - .bind(podcast_id) - .bind(video_id) - .bind(title) - .bind(description) - .bind(url) - .bind(thumbnail) - .bind(publish_date) - .bind(duration) - .bind(false) // Not completed - .bind(0) // Listen position 0 + // Update associated podcasts to include this podcast_id + let podcast_row = sqlx::query("SELECT AssociatedPodcasts FROM People WHERE PersonID = ?") + .bind(person_db_id) + .fetch_one(pool) + .await?; + let current_podcasts = podcast_row.try_get::, _>("AssociatedPodcasts")?; + + let updated_podcasts = if let Some(podcasts) = current_podcasts { + if !podcasts.contains(&podcast_id.to_string()) { + format!("{},{}", podcasts, podcast_id) + } else { + podcasts + } + } else { + podcast_id.to_string() + }; + + // Update the record + sqlx::query("UPDATE People SET AssociatedPodcasts = ? WHERE PersonID = ?") + .bind(&updated_podcasts) + .bind(person_db_id) .execute(pool) - .await; - } - DatabasePool::MySQL(pool) => { - let _ = sqlx::query(r#" - INSERT IGNORE INTO YouTubeVideos ( - PodcastID, YouTubeVideoID, VideoTitle, VideoDescription, VideoURL, - ThumbnailURL, PublishedAt, Duration, Completed, ListenPosition - ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - "#) - .bind(podcast_id) - .bind(video_id) - .bind(title) - .bind(description) - .bind(url) - .bind(thumbnail) - .bind(publish_date) - .bind(duration) - .bind(false) // Not completed - .bind(0) // Listen position 0 + .await?; + + println!("Updated existing person subscription with ID: {}", person_db_id); + person_db_id + } else { + // Insert new person subscription + let result = sqlx::query(" + INSERT INTO People (UserID, Name, PersonImg, PeopleDBID, AssociatedPodcasts) + VALUES (?, ?, ?, ?, ?) 
+ ") + .bind(user_id) + .bind(person_name) + .bind(person_img) + .bind(person_id) + .bind(podcast_id.to_string()) .execute(pool) - .await; + .await?; + + let person_db_id = result.last_insert_id() as i32; + println!("Successfully subscribed to person with ID: {}", person_db_id); + person_db_id } } - } + }; - println!("Successfully added {} YouTube videos", videos.len()); - Ok(()) + Ok(result) } - // Get video date using web scraping - matches Python get_video_date function exactly - pub async fn get_video_date(&self, video_id: &str) -> AppResult> { - let client = reqwest::Client::new(); - let url = format!("https://www.youtube.com/watch?v={}", video_id); - - let response = client.get(&url) - .header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36") - .send() - .await - .map_err(|e| AppError::external_error(&format!("Failed to fetch video page: {}", e)))?; - - let html = response.text().await - .map_err(|e| AppError::external_error(&format!("Failed to read response: {}", e)))?; + // Unsubscribe from person - matches Python unsubscribe_from_person function exactly + pub async fn unsubscribe_from_person(&self, user_id: i32, person_id: i32, person_name: &str) -> AppResult { + println!("Unsubscribing user {} from person {}: {}", user_id, person_id, person_name); - // Parse HTML to find upload date (simplified version of Python's BeautifulSoup approach) - if let Some(start) = html.find("\"uploadDate\":\"") { - let date_start = start + "\"uploadDate\":\"".len(); - if let Some(end) = html[date_start..].find("\"") { - let date_str = &html[date_start..date_start + end]; - if let Ok(parsed_date) = chrono::DateTime::parse_from_rfc3339(date_str) { - return Ok(parsed_date.with_timezone(&chrono::Utc)); + // Find and delete the person record + let rows_affected = match self { + DatabasePool::Postgres(pool) => { + // When peopledbid is 0 or not set, use name for lookup to avoid collisions + if person_id == 0 { + sqlx::query(r#"DELETE FROM "People" WHERE 
userid = $1 AND LOWER(name) = LOWER($2)"#) + .bind(user_id) + .bind(person_name) + .execute(pool) + .await? + .rows_affected() + } else { + sqlx::query(r#"DELETE FROM "People" WHERE userid = $1 AND peopledbid = $2"#) + .bind(user_id) + .bind(person_id) + .execute(pool) + .await? + .rows_affected() } } - } - - // Fallback to current time minus some hours if date not found - Ok(chrono::Utc::now() - chrono::Duration::hours(1)) - } - - // Update episode count for podcast - matches Python update_episode_count function exactly - pub async fn update_episode_count(&self, podcast_id: i32) -> AppResult<()> { - println!("Updating episode count for podcast {}", podcast_id); + DatabasePool::MySQL(pool) => { + // When peopledbid is 0 or not set, use name for lookup to avoid collisions + if person_id == 0 { + sqlx::query("DELETE FROM People WHERE UserID = ? AND LOWER(Name) = LOWER(?)") + .bind(user_id) + .bind(person_name) + .execute(pool) + .await? + .rows_affected() + } else { + sqlx::query("DELETE FROM People WHERE UserID = ? AND PeopleDBID = ?") + .bind(user_id) + .bind(person_id) + .execute(pool) + .await? 
+ .rows_affected() + } + } + }; - // Count episodes and YouTube videos - let (episode_count, youtube_count) = match self { - DatabasePool::Postgres(pool) => { - let episode_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "Episodes" WHERE podcastid = $1"#) - .bind(podcast_id) - .fetch_one(pool) - .await?; - let episode_count: i64 = episode_row.try_get("count")?; - - let youtube_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) - .bind(podcast_id) - .fetch_one(pool) - .await?; - let youtube_count: i64 = youtube_row.try_get("count")?; - - (episode_count, youtube_count) - } - DatabasePool::MySQL(pool) => { - let episode_row = sqlx::query("SELECT COUNT(*) as count FROM Episodes WHERE PodcastID = ?") - .bind(podcast_id) - .fetch_one(pool) - .await?; - let episode_count: i64 = episode_row.try_get("count")?; - - let youtube_row = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") - .bind(podcast_id) - .fetch_one(pool) - .await?; - let youtube_count: i64 = youtube_row.try_get("count")?; - - (episode_count, youtube_count) + if rows_affected > 0 { + // Check if this was the last subscriber to this person + let count: i64 = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT COUNT(*) as count FROM "People" WHERE peopledbid = $1"#) + .bind(person_id) + .fetch_one(pool) + .await?; + row.try_get("count")? + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT COUNT(*) as count FROM People WHERE PeopleDBID = ?") + .bind(person_id) + .fetch_one(pool) + .await?; + row.try_get("count")? 
+ } + }; + + // If no more subscribers, clean up episodes + if count == 0 { + match self { + DatabasePool::Postgres(pool) => { + let _ = sqlx::query(r#"DELETE FROM "PeopleEpisodes" WHERE personid = $1"#) + .bind(person_id) + .execute(pool) + .await; + } + DatabasePool::MySQL(pool) => { + let _ = sqlx::query("DELETE FROM PeopleEpisodes WHERE PersonID = ?") + .bind(person_id) + .execute(pool) + .await; + } + } } - }; + + println!("Successfully unsubscribed from person {}", person_id); + Ok(true) + } else { + println!("Person subscription not found for user {} and person {}", user_id, person_id); + Ok(false) + } + } + + // Get person subscriptions - matches Python get_person_subscriptions function exactly + pub async fn get_person_subscriptions(&self, user_id: i32) -> AppResult> { + println!("Getting person subscriptions for user {}", user_id); - let total_count = episode_count + youtube_count; + let mut subscriptions = Vec::new(); - // Update podcast episode count match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "Podcasts" SET episodecount = $2 WHERE podcastid = $1"#) - .bind(podcast_id) - .bind(total_count as i32) - .execute(pool) + let rows = sqlx::query(r#" + SELECT personid, userid, name, personimg, peopledbid, associatedpodcasts + FROM "People" + WHERE userid = $1 + ORDER BY name + "#) + .bind(user_id) + .fetch_all(pool) .await?; + + for row in rows { + let person_id = row.try_get::("personid")?; + let associated_podcasts_str = row.try_get::, _>("associatedpodcasts")?; + + // Count associated podcasts by splitting the comma-separated string and filtering out empty/0 values + let podcast_count = if let Some(podcasts_str) = &associated_podcasts_str { + if podcasts_str.is_empty() { + 0 + } else { + podcasts_str.split(',') + .filter(|s| !s.trim().is_empty() && s.trim() != "0") + .count() + } + } else { + 0 + }; + + // Count episodes for this person from PeopleEpisodes table + let episode_count: i64 = sqlx::query_scalar(r#" + SELECT COUNT(*) + 
FROM "PeopleEpisodes" pe + INNER JOIN "Podcasts" p ON pe.podcastid = p.podcastid + WHERE pe.personid = $1 AND p.userid = $2 + "#) + .bind(person_id) + .bind(user_id) + .fetch_one(pool) + .await + .unwrap_or(0); + + let subscription = serde_json::json!({ + "personid": person_id, + "userid": row.try_get::("userid")?, + "name": row.try_get::("name")?, + "image": row.try_get::("personimg")?, + "peopledbid": row.try_get::("peopledbid")?, + "associatedpodcasts": podcast_count, + "episode_count": episode_count + }); + subscriptions.push(subscription); + } } DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE Podcasts SET EpisodeCount = ? WHERE PodcastID = ?") - .bind(total_count as i32) - .bind(podcast_id) - .execute(pool) + let rows = sqlx::query(" + SELECT PersonID, UserID, Name, PersonImg, PeopleDBID, AssociatedPodcasts + FROM People + WHERE UserID = ? + ORDER BY Name + ") + .bind(user_id) + .fetch_all(pool) .await?; + + for row in rows { + let person_id = row.try_get::("PersonID")?; + let associated_podcasts_str = row.try_get::, _>("AssociatedPodcasts")?; + + // Count associated podcasts by splitting the comma-separated string and filtering out empty/0 values + let podcast_count = if let Some(podcasts_str) = &associated_podcasts_str { + if podcasts_str.is_empty() { + 0 + } else { + podcasts_str.split(',') + .filter(|s| !s.trim().is_empty() && s.trim() != "0") + .count() + } + } else { + 0 + }; + + // Count episodes for this person from PeopleEpisodes table + let episode_count: i64 = sqlx::query_scalar(" + SELECT COUNT(*) + FROM PeopleEpisodes pe + INNER JOIN Podcasts p ON pe.PodcastID = p.PodcastID + WHERE pe.PersonID = ? AND p.UserID = ? 
+ ") + .bind(person_id) + .bind(user_id) + .fetch_one(pool) + .await + .unwrap_or(0); + + let subscription = serde_json::json!({ + "personid": person_id, + "userid": row.try_get::("UserID")?, + "name": row.try_get::("Name")?, + "image": row.try_get::("PersonImg")?, + "peopledbid": row.try_get::("PeopleDBID")?, + "associatedpodcasts": podcast_count, + "episode_count": episode_count + }); + subscriptions.push(subscription); + } } } - println!("Updated episode count to {} ({} episodes + {} videos)", total_count, episode_count, youtube_count); - Ok(()) + println!("Found {} person subscriptions for user {}", subscriptions.len(), user_id); + Ok(subscriptions) } - - // Get user history - matches Python user_history function exactly with YouTube UNION - pub async fn user_history(&self, user_id: i32) -> AppResult> { + + // Get person episodes - matches Python return_person_episodes function exactly + pub async fn get_person_episodes(&self, user_id: i32, person_id: i32) -> AppResult> { + println!("Getting episodes for user {} and person {}", user_id, person_id); + + let mut episodes = Vec::new(); + match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query( - r#"SELECT * FROM ( - SELECT - "Episodes".EpisodeID as episodeid, - "UserEpisodeHistory".ListenDate as listendate, - "UserEpisodeHistory".ListenDuration as listenduration, - "Episodes".EpisodeTitle as episodetitle, - "Episodes".EpisodeDescription as episodedescription, - "Episodes".EpisodeArtwork as episodeartwork, - "Episodes".EpisodeURL as episodeurl, - "Episodes".EpisodeDuration as episodeduration, - "Podcasts".PodcastName as podcastname, - "Episodes".EpisodePubDate as episodepubdate, - "Episodes".Completed as completed, - FALSE as is_youtube - FROM "UserEpisodeHistory" - JOIN "Episodes" ON "UserEpisodeHistory".EpisodeID = "Episodes".EpisodeID - JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID - WHERE "UserEpisodeHistory".UserID = $1 + let rows = sqlx::query(r#" + SELECT + e.episodeid, -- 
Will be NULL if no match in Episodes table + pe.episodetitle, + pe.episodedescription, + pe.episodeurl, + CASE + WHEN pe.episodeartwork IS NULL THEN + (SELECT artworkurl FROM "Podcasts" WHERE podcastid = pe.podcastid) + ELSE + CASE + WHEN p.usepodcastcoverscustomized = TRUE AND p.usepodcastcovers = TRUE THEN p.artworkurl + WHEN u.usepodcastcovers = TRUE THEN p.artworkurl + ELSE pe.episodeartwork + END + END as episodeartwork, + pe.episodepubdate, + pe.episodeduration, + p.podcastname, + CASE + WHEN ( + SELECT 1 FROM "Podcasts" + WHERE podcastid = pe.podcastid + AND userid = $1 + ) IS NOT NULL THEN + CASE + WHEN s.episodeid IS NOT NULL THEN TRUE + ELSE FALSE + END + ELSE FALSE + END AS saved, + CASE + WHEN ( + SELECT 1 FROM "Podcasts" + WHERE podcastid = pe.podcastid + AND userid = $1 + ) IS NOT NULL THEN + CASE + WHEN d.episodeid IS NOT NULL THEN TRUE + ELSE FALSE + END + ELSE FALSE + END AS downloaded, + CASE + WHEN ( + SELECT 1 FROM "Podcasts" + WHERE podcastid = pe.podcastid + AND userid = $1 + ) IS NOT NULL THEN + COALESCE(h.listenduration, 0) + ELSE 0 + END AS listenduration, + FALSE as is_youtube + FROM "PeopleEpisodes" pe + INNER JOIN "People" pp ON pe.personid = pp.personid + INNER JOIN "Podcasts" p ON pe.podcastid = p.podcastid + LEFT JOIN "Users" u ON p.userid = u.userid + LEFT JOIN "Episodes" e ON e.episodeurl = pe.episodeurl AND e.podcastid = pe.podcastid + LEFT JOIN ( + SELECT * FROM "SavedEpisodes" WHERE userid = $2 + ) s ON s.episodeid = e.episodeid + LEFT JOIN ( + SELECT * FROM "DownloadedEpisodes" WHERE userid = $3 + ) d ON d.episodeid = e.episodeid + LEFT JOIN ( + SELECT * FROM "UserEpisodeHistory" WHERE userid = $4 + ) h ON h.episodeid = e.episodeid + WHERE pe.personid = $5 + AND pe.episodepubdate >= NOW() - INTERVAL '30 days' + ORDER BY pe.episodepubdate DESC + "#) + .bind(user_id) // $1 + .bind(user_id) // $2 + .bind(user_id) // $3 + .bind(user_id) // $4 + .bind(person_id) // $5 + .fetch_all(pool) + .await?; + + for row in rows { + let 
episodeid = row.try_get::, _>("episodeid")?; + let episodetitle = row.try_get::("episodetitle")?; + let episodedescription = row.try_get::("episodedescription")?; + let episodeurl = row.try_get::("episodeurl")?; + let episodeartwork = row.try_get::, _>("episodeartwork")?; + let dt = row.try_get::("episodepubdate")?; + let episodepubdate = dt.format("%Y-%m-%dT%H:%M:%S").to_string(); + let episodeduration = row.try_get::("episodeduration")?; + let podcastname = row.try_get::("podcastname")?; + let saved = row.try_get::("saved")?; + let downloaded = row.try_get::("downloaded")?; + let listenduration = row.try_get::("listenduration")?; + let is_youtube = row.try_get::("is_youtube")?; - UNION ALL - - SELECT - "YouTubeVideos".VideoID as episodeid, - NULL as listendate, - "YouTubeVideos".ListenPosition as listenduration, - "YouTubeVideos".VideoTitle as episodetitle, - "YouTubeVideos".VideoDescription as episodedescription, - "YouTubeVideos".ThumbnailURL as episodeartwork, - "YouTubeVideos".VideoURL as episodeurl, - "YouTubeVideos".Duration as episodeduration, - "Podcasts".PodcastName as podcastname, - "YouTubeVideos".PublishedAt as episodepubdate, - "YouTubeVideos".Completed as completed, - TRUE as is_youtube - FROM "YouTubeVideos" - JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID - WHERE "YouTubeVideos".ListenPosition > 0 - AND "Podcasts".UserID = $1 - ) combined_results - ORDER BY listendate DESC NULLS LAST"# - ) - .bind(user_id) - .fetch_all(pool) - .await?; - - let mut episodes = Vec::new(); - for row in rows { - let listendate = row.try_get::, _>("listendate")? - .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); - let episodepubdate = row.try_get::, _>("episodepubdate")? 
- .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); - - episodes.push(serde_json::json!({ - "episodeid": row.get::, _>("episodeid"), - "listendate": listendate, - "listenduration": row.get::, _>("listenduration"), - "episodetitle": row.get::, _>("episodetitle"), - "episodedescription": row.get::, _>("episodedescription"), - "episodeartwork": row.get::, _>("episodeartwork"), - "episodeurl": row.get::, _>("episodeurl"), - "episodeduration": row.get::, _>("episodeduration"), - "podcastname": row.get::, _>("podcastname"), + let episode = serde_json::json!({ + "episodeid": episodeid.unwrap_or(-1), + "episodetitle": episodetitle, + "episodedescription": episodedescription, + "episodeurl": episodeurl, + "episodeartwork": episodeartwork, "episodepubdate": episodepubdate, - "completed": row.get::, _>("completed"), - "is_youtube": row.get::, _>("is_youtube") - })); + "episodeduration": episodeduration, + "podcastname": podcastname, + "saved": saved, + "downloaded": downloaded, + "listenduration": listenduration, + "is_youtube": is_youtube + }); + episodes.push(episode); } - Ok(episodes) } DatabasePool::MySQL(pool) => { - let rows = sqlx::query( - "SELECT * FROM ( - SELECT - e.EpisodeID as episodeid, - ueh.ListenDate as listendate, - ueh.ListenDuration as listenduration, - e.EpisodeTitle as episodetitle, - e.EpisodeDescription as episodedescription, - e.EpisodeArtwork as episodeartwork, - e.EpisodeURL as episodeurl, - e.EpisodeDuration as episodeduration, - p.PodcastName as podcastname, - e.EpisodePubDate as episodepubdate, - e.Completed as completed, - 0 as is_youtube - FROM UserEpisodeHistory ueh - JOIN Episodes e ON ueh.EpisodeID = e.EpisodeID - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE ueh.UserID = ? 
- - UNION ALL - - SELECT - yv.VideoID as episodeid, - NULL as listendate, - yv.ListenPosition as listenduration, - yv.VideoTitle as episodetitle, - yv.VideoDescription as episodedescription, - yv.ThumbnailURL as episodeartwork, - yv.VideoURL as episodeurl, - yv.Duration as episodeduration, - p.PodcastName as podcastname, - yv.PublishedAt as episodepubdate, - yv.Completed as completed, - 1 as is_youtube - FROM YouTubeVideos yv - JOIN Podcasts p ON yv.PodcastID = p.PodcastID - WHERE yv.ListenPosition > 0 - AND p.UserID = ? - ) combined_results - ORDER BY listendate DESC" - ) - .bind(user_id) - .bind(user_id) - .fetch_all(pool) - .await?; - - let mut episodes = Vec::new(); + let rows = sqlx::query(" + SELECT + e.EpisodeID, -- Will be NULL if no match in Episodes table + pe.EpisodeTitle, + pe.EpisodeDescription, + pe.EpisodeURL, + CASE + WHEN pe.EpisodeArtwork IS NULL THEN p.ArtworkURL + ELSE + CASE + WHEN p.UsePodcastCoversCustomized = TRUE AND p.UsePodcastCovers = TRUE THEN p.ArtworkURL + WHEN u.UsePodcastCovers = TRUE THEN p.ArtworkURL + ELSE pe.EpisodeArtwork + END + END as EpisodeArtwork, + pe.EpisodePubDate, + pe.EpisodeDuration, + p.PodcastName, + IF( + EXISTS( + SELECT 1 FROM Podcasts + WHERE PodcastID = pe.PodcastID + AND UserID = ? + ), + IF(s.EpisodeID IS NOT NULL, TRUE, FALSE), + FALSE + ) AS Saved, + IF( + EXISTS( + SELECT 1 FROM Podcasts + WHERE PodcastID = pe.PodcastID + AND UserID = ? + ), + IF(d.EpisodeID IS NOT NULL, TRUE, FALSE), + FALSE + ) AS Downloaded, + IF( + EXISTS( + SELECT 1 FROM Podcasts + WHERE PodcastID = pe.PodcastID + AND UserID = ? + ), + COALESCE(h.ListenDuration, 0), + 0 + ) AS ListenDuration, + FALSE as is_youtube + FROM PeopleEpisodes pe + INNER JOIN People pp ON pe.PersonID = pp.PersonID + INNER JOIN Podcasts p ON pe.PodcastID = p.PodcastID + LEFT JOIN Users u ON p.UserID = u.UserID + LEFT JOIN Episodes e ON e.EpisodeURL = pe.EpisodeURL AND e.PodcastID = pe.PodcastID + LEFT JOIN ( + SELECT * FROM SavedEpisodes WHERE UserID = ? 
+ ) s ON s.EpisodeID = e.EpisodeID + LEFT JOIN ( + SELECT * FROM DownloadedEpisodes WHERE UserID = ? + ) d ON d.EpisodeID = e.EpisodeID + LEFT JOIN ( + SELECT * FROM UserEpisodeHistory WHERE UserID = ? + ) h ON h.EpisodeID = e.EpisodeID + WHERE pe.PersonID = ? + AND pe.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 30 DAY) + ORDER BY pe.EpisodePubDate DESC + ") + .bind(user_id) // 1st ? + .bind(user_id) // 2nd ? + .bind(user_id) // 3rd ? + .bind(user_id) // 4th ? + .bind(user_id) // 5th ? + .bind(user_id) // 6th ? + .bind(person_id) // 7th ? + .fetch_all(pool) + .await?; + for row in rows { - let listendate = row.try_get::, _>("listendate")? - .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); - let episodepubdate = row.try_get::, _>("episodepubdate")? - .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); - - episodes.push(serde_json::json!({ - "episodeid": row.get::, _>("episodeid"), - "listendate": listendate, - "listenduration": row.get::, _>("listenduration"), - "episodetitle": row.get::, _>("episodetitle"), - "episodedescription": row.get::, _>("episodedescription"), - "episodeartwork": row.get::, _>("episodeartwork"), - "episodeurl": row.get::, _>("episodeurl"), - "episodeduration": row.get::, _>("episodeduration"), - "podcastname": row.get::, _>("podcastname"), + let episodeid = row.try_get::, _>("EpisodeID")?; + let episodetitle = row.try_get::("EpisodeTitle")?; + let episodedescription = row.try_get::("EpisodeDescription")?; + let episodeurl = row.try_get::("EpisodeURL")?; + let episodeartwork = row.try_get::, _>("EpisodeArtwork")?; + let dt = row.try_get::("EpisodePubDate")?; + let episodepubdate = dt.format("%Y-%m-%dT%H:%M:%S").to_string(); + let episodeduration = row.try_get::("EpisodeDuration")?; + let podcastname = row.try_get::("PodcastName")?; + let saved = row.try_get::("Saved")?; + let downloaded = row.try_get::("Downloaded")?; + let listenduration = row.try_get::("ListenDuration")?; + let is_youtube = row.try_get::("is_youtube")?; + + let 
episode = serde_json::json!({ + "episodeid": episodeid.unwrap_or(-1), + "episodetitle": episodetitle, + "episodedescription": episodedescription, + "episodeurl": episodeurl, + "episodeartwork": episodeartwork, "episodepubdate": episodepubdate, - "completed": row.get::, _>("completed"), - "is_youtube": row.get::, _>("is_youtube") - })); + "episodeduration": episodeduration, + "podcastname": podcastname, + "saved": saved, + "downloaded": downloaded, + "listenduration": listenduration, + "is_youtube": is_youtube + }); + episodes.push(episode); } - Ok(episodes) } } + + println!("Found {} episodes for user {} and person {}", episodes.len(), user_id, person_id); + Ok(episodes) } - - // Increment listen time - matches Python increment_listen_time function exactly - pub async fn increment_listen_time(&self, user_id: i32) -> AppResult<()> { + + // Check existing YouTube channel subscription - matches Python check_existing_channel_subscription function exactly + pub async fn check_existing_channel_subscription(&self, channel_id: &str, user_id: i32) -> AppResult> { + println!("Checking existing channel subscription for {} and user {}", channel_id, user_id); + match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "UserStats" SET TimeListened = TimeListened + 1 WHERE UserID = $1"#) + let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) + .bind(format!("https://www.youtube.com/channel/{}", channel_id)) .bind(user_id) - .execute(pool) + .fetch_optional(pool) .await?; - Ok(()) + + if let Some(row) = row { + let podcast_id: i32 = row.try_get("podcastid")?; + println!("Found existing subscription with ID: {}", podcast_id); + Ok(Some(podcast_id)) + } else { + println!("No existing subscription found"); + Ok(None) + } } DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE UserStats SET TimeListened = TimeListened + 1 WHERE UserID = ?") + let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? 
AND UserID = ?") + .bind(format!("https://www.youtube.com/channel/{}", channel_id)) .bind(user_id) - .execute(pool) + .fetch_optional(pool) .await?; - Ok(()) + + if let Some(row) = row { + let podcast_id: i32 = row.try_get("podcastid")?; + println!("Found existing subscription with ID: {}", podcast_id); + Ok(Some(podcast_id)) + } else { + println!("No existing subscription found"); + Ok(None) + } } } } - - // Get playback speed - matches Python get_playback_speed function exactly - pub async fn get_playback_speed(&self, user_id: i32, is_youtube: bool, podcast_id: Option) -> AppResult { - match self { - DatabasePool::Postgres(pool) => { - let query = if let Some(pod_id) = podcast_id { - r#"SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = $1"# - } else { - r#"SELECT PlaybackSpeed FROM "Users" WHERE UserID = $1"# - }; - - let param = podcast_id.unwrap_or(user_id); - let row = sqlx::query(query) - .bind(param) + + // Add YouTube channel - matches Python add_youtube_channel function exactly + pub async fn add_youtube_channel(&self, channel_info: &std::collections::HashMap, user_id: i32, feed_cutoff: i32) -> AppResult { + println!("Adding YouTube channel to database for user {}", user_id); + + let channel_id = channel_info.get("channel_id").ok_or_else(|| AppError::bad_request("Channel ID is required"))?; + let empty_string = String::new(); + let name = channel_info.get("name").unwrap_or(&empty_string); + let description = channel_info.get("description").unwrap_or(&empty_string); + let thumbnail_url = channel_info.get("thumbnail_url").unwrap_or(&empty_string); + let feed_url = format!("https://www.youtube.com/channel/{}", channel_id); + + // Insert new YouTube channel as podcast + let podcast_id = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + INSERT INTO "Podcasts" ( + userid, podcastname, artworkurl, description, episodecount, + websiteurl, feedurl, author, categories, explicit, podcastindexid, feedcutoffdays, isyoutubechannel + ) 
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + RETURNING podcastid + "#) + .bind(user_id) + .bind(name) + .bind(thumbnail_url) + .bind(description) + .bind(0) // Initial episode count + .bind(&feed_url) + .bind(&feed_url) + .bind(name) // Use channel name as author + .bind("{}") // Empty categories for YouTube + .bind(false) // Not explicit by default + .bind(0) // No podcast index ID for YouTube + .bind(feed_cutoff) + .bind(true) // Is YouTube channel .fetch_one(pool) .await?; - - Ok(row.try_get::("PlaybackSpeed").unwrap_or(1.0)) + + row.try_get("podcastid")? } DatabasePool::MySQL(pool) => { - let query = if let Some(pod_id) = podcast_id { - "SELECT PlaybackSpeed FROM Podcasts WHERE PodcastID = ?" - } else { - "SELECT PlaybackSpeed FROM Users WHERE UserID = ?" - }; - - let param = podcast_id.unwrap_or(user_id); - let row = sqlx::query(query) - .bind(param) - .fetch_one(pool) + let result = sqlx::query(r#" + INSERT INTO Podcasts ( + UserID, PodcastName, ArtworkURL, Description, EpisodeCount, + WebsiteURL, FeedURL, Author, Categories, Explicit, PodcastIndexID, FeedCutoffDays, IsYouTubeChannel + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ "#) + .bind(user_id) + .bind(name) + .bind(thumbnail_url) + .bind(description) + .bind(0) // Initial episode count + .bind(&feed_url) + .bind(&feed_url) + .bind(name) // Use channel name as author + .bind("{}") // Empty categories for YouTube + .bind(false) // Not explicit by default + .bind(0) // No podcast index ID for YouTube + .bind(feed_cutoff) + .bind(true) // Is YouTube channel + .execute(pool) .await?; - - Ok(row.try_get::("PlaybackSpeed").unwrap_or(1.0)) + + result.last_insert_id() as i32 } - } - } - - // Add news feed if not already added - matches Python add_news_feed_if_not_added function exactly - - // Cleanup old episodes - matches Python cleanup_old_episodes function exactly - pub async fn cleanup_old_episodes(&self) -> AppResult<()> { - self.cleanup_old_people_episodes(30).await?; - self.cleanup_expired_shared_episodes().await?; - Ok(()) - } - - // Cleanup old people episodes - matches Python cleanup_old_people_episodes function exactly - pub async fn cleanup_old_people_episodes(&self, days: i32) -> AppResult<()> { - let cutoff_date = chrono::Utc::now() - chrono::Duration::days(days as i64); + }; + // Update UserStats PodcastsAdded counter match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"DELETE FROM "PeopleEpisodes" WHERE addeddate < $1"#) - .bind(cutoff_date) + let _ = sqlx::query(r#" + UPDATE "UserStats" + SET podcastsadded = podcastsadded + 1 + WHERE userid = $1 + "#) + .bind(user_id) .execute(pool) - .await?; - - tracing::info!("Cleaned up {} old PeopleEpisodes records older than {} days", result.rows_affected(), days); + .await; } DatabasePool::MySQL(pool) => { - let result = sqlx::query("DELETE FROM PeopleEpisodes WHERE Created < ?") - .bind(cutoff_date) + let _ = sqlx::query("UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = ?") + .bind(user_id) .execute(pool) - .await?; - - tracing::info!("Cleaned up {} old PeopleEpisodes records older than {} days", result.rows_affected(), days); + .await; } 
} - Ok(()) + + println!("Successfully added YouTube channel with ID: {}", podcast_id); + Ok(podcast_id) } - // Cleanup expired shared episodes - matches Python cleanup_expired_shared_episodes function exactly - pub async fn cleanup_expired_shared_episodes(&self) -> AppResult<()> { - let now = chrono::Utc::now(); - + // Check if YouTube channel already exists - matches Python check_youtube_channel function exactly + pub async fn check_youtube_channel(&self, user_id: i32, channel_name: &str, channel_url: &str) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"DELETE FROM "SharedEpisodes" WHERE expirationdate < $1"#) - .bind(now) - .execute(pool) + let row = sqlx::query(r#" + SELECT podcastid FROM "Podcasts" + WHERE userid = $1 AND podcastname = $2 AND feedurl = $3 AND isyoutubechannel = TRUE + "#) + .bind(user_id) + .bind(channel_name) + .bind(channel_url) + .fetch_optional(pool) .await?; - tracing::info!("Cleaned up {} expired SharedEpisodes records", result.rows_affected()); + Ok(row.is_some()) } DatabasePool::MySQL(pool) => { - let result = sqlx::query("DELETE FROM SharedEpisodes WHERE ExpirationDate < ?") - .bind(now) - .execute(pool) + let row = sqlx::query(r#" + SELECT PodcastID FROM Podcasts + WHERE UserID = ? AND PodcastName = ? AND FeedURL = ? 
AND IsYouTubeChannel = TRUE + "#) + .bind(user_id) + .bind(channel_name) + .bind(channel_url) + .fetch_optional(pool) .await?; - tracing::info!("Cleaned up {} expired SharedEpisodes records", result.rows_affected()); + Ok(row.is_some()) } } - Ok(()) } + + // Remove old YouTube videos - deletes videos and all their references from dependent tables + pub async fn remove_old_youtube_videos(&self, podcast_id: i32, cutoff_date: chrono::DateTime) -> AppResult<()> { + println!("Removing old YouTube videos for podcast {} before {}", podcast_id, cutoff_date); - // Update all playlists - matches Python update_all_playlists function exactly - pub async fn update_all_playlists(&self) -> AppResult<()> { - tracing::info!("=================== PLAYLIST UPDATE STARTING ==================="); - tracing::info!("Starting to fetch all playlists"); + let cutoff_naive = cutoff_date.naive_utc(); + + let rows_affected = match self { + DatabasePool::Postgres(pool) => { + // First, delete all references from dependent tables + let cleanup_queries = vec![ + r#"DELETE FROM "UserVideoHistory" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2)"#, + r#"DELETE FROM "SavedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2)"#, + r#"DELETE FROM "DownloadedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2)"#, + r#"DELETE FROM "PlaylistContents" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2)"#, + r#"DELETE FROM "EpisodeQueue" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2)"#, + ]; + + for query in cleanup_queries { + sqlx::query(query) + .bind(podcast_id) + .bind(cutoff_naive) + .execute(pool) + .await?; + } + + // Now delete the videos themselves + sqlx::query(r#"DELETE FROM "YouTubeVideos" WHERE podcastid = $1 AND publishedat < $2"#) + 
.bind(podcast_id) + .bind(cutoff_naive) + .execute(pool) + .await? + .rows_affected() + } + DatabasePool::MySQL(pool) => { + // First, delete all references from dependent tables + let cleanup_queries = vec![ + "DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?)", + "DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?)", + "DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?)", + "DELETE FROM PlaylistContents WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?)", + "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?)", + ]; + + for query in cleanup_queries { + sqlx::query(query) + .bind(podcast_id) + .bind(cutoff_naive) + .execute(pool) + .await?; + } + + // Now delete the videos themselves + sqlx::query("DELETE FROM YouTubeVideos WHERE PodcastID = ? AND PublishedAt < ?") + .bind(podcast_id) + .bind(cutoff_naive) + .execute(pool) + .await? 
+ .rows_affected() + } + }; + + println!("Removed {} old YouTube videos", rows_affected); + Ok(()) + } + + // Get existing YouTube videos - matches Python get_existing_youtube_videos function exactly + pub async fn get_existing_youtube_videos(&self, podcast_id: i32) -> AppResult> { + println!("Getting existing YouTube videos for podcast {}", podcast_id); + + let mut video_urls = Vec::new(); match self { DatabasePool::Postgres(pool) => { - let playlists = sqlx::query(r#" - SELECT playlistid, name, userid, podcastids, includeunplayed, - includepartiallyplayed, includeplayed, playprogressmin, playprogressmax, - timefilterhours, minduration, maxduration, sortorder, - groupbypodcast, maxepisodes - FROM "Playlists" - "#) + let rows = sqlx::query(r#"SELECT videourl FROM "YouTubeVideos" WHERE podcastid = $1"#) + .bind(podcast_id) .fetch_all(pool) .await?; - tracing::info!("Found {} playlists to update", playlists.len()); - - for playlist in playlists { - let playlist_id: i32 = playlist.try_get("playlistid")?; - let playlist_name: String = playlist.try_get("name")?; - let user_id: i32 = playlist.try_get("userid")?; - - tracing::info!("Updating playlist: {} (ID: {}, User: {})", playlist_name, playlist_id, user_id); - - match self.update_playlist_contents(playlist_id).await { - Ok(episode_count) => { - tracing::info!("Successfully updated playlist '{}': {} episodes", playlist_name, episode_count); - } - Err(e) => { - tracing::error!("Failed to update playlist '{}' (ID: {}): {}", playlist_name, playlist_id, e); - // Continue with other playlists - } - } + for row in rows { + let url: String = row.try_get("videourl")?; + video_urls.push(url); } } DatabasePool::MySQL(pool) => { - let playlists = sqlx::query(" - SELECT PlaylistID, PlaylistName, UserID, PodcastIds, IncludeUnplayed, - IncludePartiallyPlayed, IncludePlayed, PlayProgressMin, PlayProgressMax, - TimeFilterHours, MinDuration, MaxDuration, SortOrder, - GroupByPodcast, MaxEpisodes - FROM Playlists - ") + let rows = 
sqlx::query("SELECT VideoURL FROM YouTubeVideos WHERE PodcastID = ?") + .bind(podcast_id) .fetch_all(pool) .await?; - tracing::info!("Found {} playlists to update", playlists.len()); - - for playlist in playlists { - let playlist_id: i32 = playlist.try_get("PlaylistID")?; - let playlist_name: String = playlist.try_get("PlaylistName")?; - let user_id: i32 = playlist.try_get("UserID")?; - - tracing::info!("Updating playlist: {} (ID: {}, User: {})", playlist_name, playlist_id, user_id); - - match self.update_playlist_contents(playlist_id).await { - Ok(episode_count) => { - tracing::info!("Successfully updated playlist '{}': {} episodes", playlist_name, episode_count); - } - Err(e) => { - tracing::error!("Failed to update playlist '{}' (ID: {}): {}", playlist_name, playlist_id, e); - // Continue with other playlists - } - } + for row in rows { + let url: String = row.try_get("VideoURL")?; + video_urls.push(url); } } } - tracing::info!("=================== PLAYLIST UPDATE COMPLETED ==================="); + println!("Found {} existing videos", video_urls.len()); + Ok(video_urls) + } + + // Add YouTube videos - matches Python add_youtube_videos function exactly + pub async fn add_youtube_videos(&self, podcast_id: i32, videos: &[serde_json::Value]) -> AppResult<()> { + println!("Adding {} YouTube videos for podcast {}", videos.len(), podcast_id); + + for video in videos { + let video_id = video.get("id").and_then(|v| v.as_str()).unwrap_or(""); + let title = video.get("title").and_then(|v| v.as_str()).unwrap_or(""); + let description = video.get("description").and_then(|v| v.as_str()).unwrap_or(""); + let url = video.get("url").and_then(|v| v.as_str()).unwrap_or(""); + let thumbnail = video.get("thumbnail").and_then(|v| v.as_str()).unwrap_or(""); + + println!("Processing video {} for database insertion", video_id); + println!("Video data: {:?}", video); + + let duration = if let Some(duration_str) = video.get("duration").and_then(|v| v.as_str()) { + println!("Duration as 
string: '{}'", duration_str); + let parsed = crate::handlers::youtube::parse_youtube_duration(duration_str).unwrap_or(0) as i32; + println!("Parsed duration: {}", parsed); + parsed + } else { + let int_duration = video.get("duration").and_then(|v| v.as_i64()).unwrap_or(0) as i32; + println!("Duration as integer: {}", int_duration); + int_duration + }; + + // Parse publish date + let publish_date = if let Some(date_str) = video.get("publish_date").and_then(|v| v.as_str()) { + chrono::DateTime::parse_from_rfc3339(date_str) + .map(|dt| dt.naive_utc()) + .unwrap_or_else(|_| chrono::Utc::now().naive_utc()) + } else { + chrono::Utc::now().naive_utc() + }; + + match self { + DatabasePool::Postgres(pool) => { + let _ = sqlx::query(r#" + INSERT INTO "YouTubeVideos" ( + podcastid, youtubevideoid, videotitle, videodescription, videourl, + thumbnailurl, publishedat, duration, completed, listenposition + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + "#) + .bind(podcast_id) + .bind(video_id) + .bind(title) + .bind(description) + .bind(url) + .bind(thumbnail) + .bind(publish_date) + .bind(duration) + .bind(false) // Not completed + .bind(0) // Listen position 0 + .execute(pool) + .await; + } + DatabasePool::MySQL(pool) => { + let _ = sqlx::query(r#" + INSERT IGNORE INTO YouTubeVideos ( + PodcastID, YouTubeVideoID, VideoTitle, VideoDescription, VideoURL, + ThumbnailURL, PublishedAt, Duration, Completed, ListenPosition + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
+ "#) + .bind(podcast_id) + .bind(video_id) + .bind(title) + .bind(description) + .bind(url) + .bind(thumbnail) + .bind(publish_date) + .bind(duration) + .bind(false) // Not completed + .bind(0) // Listen position 0 + .execute(pool) + .await; + } + } + } + + println!("Successfully added {} YouTube videos", videos.len()); Ok(()) } + + // Get video date using web scraping - matches Python get_video_date function exactly + pub async fn get_video_date(&self, video_id: &str) -> AppResult> { + let client = reqwest::Client::new(); + let url = format!("https://www.youtube.com/watch?v={}", video_id); + + let response = client.get(&url) + .header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36") + .send() + .await + .map_err(|e| AppError::external_error(&format!("Failed to fetch video page: {}", e)))?; + + let html = response.text().await + .map_err(|e| AppError::external_error(&format!("Failed to read response: {}", e)))?; + + // Parse HTML to find upload date (simplified version of Python's BeautifulSoup approach) + if let Some(start) = html.find("\"uploadDate\":\"") { + let date_start = start + "\"uploadDate\":\"".len(); + if let Some(end) = html[date_start..].find("\"") { + let date_str = &html[date_start..date_start + end]; + if let Ok(parsed_date) = chrono::DateTime::parse_from_rfc3339(date_str) { + return Ok(parsed_date.with_timezone(&chrono::Utc)); + } + } + } + + // Fallback to current time minus some hours if date not found + Ok(chrono::Utc::now() - chrono::Duration::hours(1)) + } - // Update playlist contents - matches Python update_playlist_contents function exactly - pub async fn update_playlist_contents(&self, playlist_id: i32) -> AppResult { - tracing::info!("======= UPDATE PLAYLIST ID: {} =======", playlist_id); + // Update episode count for podcast - matches Python update_episode_count function exactly + pub async fn update_episode_count(&self, podcast_id: i32) -> AppResult<()> { + println!("Updating episode count for podcast {}", 
podcast_id); - match self { + // Count episodes and YouTube videos + let (episode_count, youtube_count) = match self { DatabasePool::Postgres(pool) => { - // Get playlist configuration first - let playlist = sqlx::query(r#" - SELECT playlistid, name, userid, podcastids, includeunplayed, - includepartiallyplayed, includeplayed, playprogressmin, playprogressmax, - timefilterhours, minduration, maxduration, sortorder, - groupbypodcast, maxepisodes, issystemplaylist - FROM "Playlists" WHERE playlistid = $1 - "#) - .bind(playlist_id) + let episode_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "Episodes" WHERE podcastid = $1"#) + .bind(podcast_id) .fetch_one(pool) .await?; + let episode_count: i64 = episode_row.try_get("count")?; - // Clear existing contents - sqlx::query(r#"DELETE FROM "PlaylistContents" WHERE playlistid = $1"#) - .bind(playlist_id) - .execute(pool) - .await?; - - // Handle special playlists - let playlist_name: String = playlist.try_get("name")?; - let user_id: i32 = playlist.try_get("userid")?; - let is_system_playlist: bool = playlist.try_get("issystemplaylist").unwrap_or(false); - - let episode_count = if playlist_name == "Fresh Releases" && is_system_playlist { - // Special handling for Fresh Releases - self.update_fresh_releases_playlist_postgres(pool, playlist_id).await? - } else { - // Standard playlist query building - self.build_and_execute_playlist_query_postgres(pool, &playlist).await? 
- }; - - // Update timestamp - sqlx::query(r#"UPDATE "Playlists" SET lastupdated = CURRENT_TIMESTAMP WHERE playlistid = $1"#) - .bind(playlist_id) - .execute(pool) + let youtube_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) + .bind(podcast_id) + .fetch_one(pool) .await?; + let youtube_count: i64 = youtube_row.try_get("count")?; - Ok(episode_count) + (episode_count, youtube_count) } DatabasePool::MySQL(pool) => { - // Get playlist configuration first - let playlist = sqlx::query(" - SELECT PlaylistID, PlaylistName, UserID, PodcastIds, IncludeUnplayed, - IncludePartiallyPlayed, IncludePlayed, PlayProgressMin, PlayProgressMax, - TimeFilterHours, MinDuration, MaxDuration, SortOrder, - GroupByPodcast, MaxEpisodes, IsSystemPlaylist - FROM Playlists WHERE PlaylistID = ? - ") - .bind(playlist_id) + let episode_row = sqlx::query("SELECT COUNT(*) as count FROM Episodes WHERE PodcastID = ?") + .bind(podcast_id) .fetch_one(pool) .await?; + let episode_count: i64 = episode_row.try_get("count")?; - // Clear existing contents - sqlx::query("DELETE FROM PlaylistContents WHERE PlaylistID = ?") - .bind(playlist_id) - .execute(pool) - .await?; - - // Handle special playlists - let playlist_name: String = playlist.try_get("PlaylistName")?; - let user_id: i32 = playlist.try_get("UserID")?; - let is_system_playlist: bool = playlist.try_get("IsSystemPlaylist").unwrap_or(false); - - let episode_count = if playlist_name == "Fresh Releases" && is_system_playlist { - // Special handling for Fresh Releases - self.update_fresh_releases_playlist_mysql(pool, playlist_id).await? - } else { - // Standard playlist query building - self.build_and_execute_playlist_query_mysql(pool, &playlist).await? 
- }; - - // Update timestamp - sqlx::query("UPDATE Playlists SET LastUpdated = CURRENT_TIMESTAMP WHERE PlaylistID = ?") - .bind(playlist_id) - .execute(pool) + let youtube_row = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") + .bind(podcast_id) + .fetch_one(pool) .await?; + let youtube_count: i64 = youtube_row.try_get("count")?; - Ok(episode_count) + (episode_count, youtube_count) } - } - } - - // Get all people/hosts for refresh_hosts endpoint - matches Python refresh_all_hosts function - pub async fn get_all_people_for_refresh(&self) -> AppResult> { + }; + + let total_count = episode_count + youtube_count; + + // Update podcast episode count match self { DatabasePool::Postgres(pool) => { - let people = sqlx::query(r#" - SELECT DISTINCT p.personid, p.name, p.userid - FROM "People" p - "#) - .fetch_all(pool) + sqlx::query(r#"UPDATE "Podcasts" SET episodecount = $2 WHERE podcastid = $1"#) + .bind(podcast_id) + .bind(total_count as i32) + .execute(pool) .await?; - - let mut result = Vec::new(); - for person in people { - let person_id: i32 = person.try_get("personid")?; - let name: String = person.try_get("name")?; - let user_id: i32 = person.try_get("userid")?; - result.push((person_id, name, user_id)); - } - Ok(result) } DatabasePool::MySQL(pool) => { - let people = sqlx::query(" - SELECT DISTINCT p.PersonID, p.Name, p.UserID - FROM People p - ") - .fetch_all(pool) + sqlx::query("UPDATE Podcasts SET EpisodeCount = ? 
WHERE PodcastID = ?") + .bind(total_count as i32) + .bind(podcast_id) + .execute(pool) .await?; - - let mut result = Vec::new(); - for person in people { - let person_id: i32 = person.try_get("PersonID")?; - let name: String = person.try_get("Name")?; - let user_id: i32 = person.try_get("UserID")?; - result.push((person_id, name, user_id)); - } - Ok(result) } } + + println!("Updated episode count to {} ({} episodes + {} videos)", total_count, episode_count, youtube_count); + Ok(()) } - // Process person subscription - matches Python process_person_subscription function exactly - pub async fn process_person_subscription(&self, user_id: i32, person_id: i32, person_name: String) -> AppResult<()> { - use std::collections::HashSet; - - tracing::info!("Starting refresh for host: {} (ID: {})", person_name, person_id); - - let mut processed_shows: HashSet<(String, String, i32)> = HashSet::new(); - let people_url = std::env::var("PEOPLE_API_URL").unwrap_or_else(|_| "https://podpeople.pinepods.online".to_string()); - let api_url = std::env::var("SEARCH_API_URL").unwrap_or_else(|_| "https://api.pinepods.online/api/search".to_string()); - - // 1. 
Get podcasts from podpeople - let client = reqwest::Client::builder() - .timeout(std::time::Duration::from_secs(30)) - .build() - .map_err(|e| AppError::internal(&format!("Failed to create HTTP client: {}", e)))?; - - match client - .get(&format!("{}/api/hostsearch", people_url)) - .query(&[("name", &person_name)]) - .send() - .await - { - Ok(response) => { - if let Ok(podpeople_data) = response.json::().await { - if podpeople_data.get("success").and_then(|v| v.as_bool()).unwrap_or(false) { - if let Some(podcasts) = podpeople_data.get("podcasts").and_then(|v| v.as_array()) { - for podcast in podcasts { - if let (Some(title), Some(feed_url), Some(id)) = ( - podcast.get("title").and_then(|v| v.as_str()), - podcast.get("feed_url").and_then(|v| v.as_str()), - podcast.get("id").and_then(|v| v.as_i64()), - ) { - processed_shows.insert((title.to_string(), feed_url.to_string(), id as i32)); - } - } - } - } + // Get user history - matches Python user_history function exactly with YouTube UNION + pub async fn user_history(&self, user_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query( + r#"SELECT * FROM ( + SELECT + "Episodes".episodeid as episodeid, + "UserEpisodeHistory".listendate as listendate, + "UserEpisodeHistory".listenduration as listenduration, + "Episodes".episodetitle as episodetitle, + "Episodes".episodedescription as episodedescription, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, + "Episodes".episodeurl as episodeurl, + "Episodes".episodeduration as episodeduration, + "Podcasts".podcastname as podcastname, + "Episodes".episodepubdate as episodepubdate, + "Episodes".completed as completed, + CASE WHEN "SavedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN "EpisodeQueue".episodeid IS 
NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + FALSE as is_youtube + FROM "UserEpisodeHistory" + JOIN "Episodes" ON "UserEpisodeHistory".episodeid = "Episodes".episodeid + JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid + LEFT JOIN "SavedEpisodes" ON "Episodes".episodeid = "SavedEpisodes".episodeid AND "SavedEpisodes".userid = $1 + LEFT JOIN "EpisodeQueue" ON "Episodes".episodeid = "EpisodeQueue".episodeid AND "EpisodeQueue".userid = $1 + LEFT JOIN "DownloadedEpisodes" ON "Episodes".episodeid = "DownloadedEpisodes".episodeid AND "DownloadedEpisodes".userid = $1 + WHERE "UserEpisodeHistory".userid = $1 + + UNION ALL + + SELECT + "YouTubeVideos".videoid as episodeid, + NULL as listendate, + "YouTubeVideos".listenposition as listenduration, + "YouTubeVideos".videotitle as episodetitle, + "YouTubeVideos".videodescription as episodedescription, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "YouTubeVideos".thumbnailurl + END as episodeartwork, + "YouTubeVideos".videourl as episodeurl, + "YouTubeVideos".duration as episodeduration, + "Podcasts".podcastname as podcastname, + "YouTubeVideos".publishedat as episodepubdate, + "YouTubeVideos".completed as completed, + CASE WHEN "SavedVideos".videoid IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN "EpisodeQueue".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN "DownloadedVideos".videoid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + TRUE as is_youtube + FROM "YouTubeVideos" + JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid + LEFT JOIN "SavedVideos" ON "YouTubeVideos".videoid = "SavedVideos".videoid 
AND "SavedVideos".userid = $1 + LEFT JOIN "EpisodeQueue" ON "YouTubeVideos".videoid = "EpisodeQueue".episodeid AND "EpisodeQueue".userid = $1 + LEFT JOIN "DownloadedVideos" ON "YouTubeVideos".videoid = "DownloadedVideos".videoid AND "DownloadedVideos".userid = $1 + WHERE "YouTubeVideos".listenposition > 0 + AND "Podcasts".userid = $1 + ) combined_results + ORDER BY listendate DESC NULLS LAST"# + ) + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut episodes = Vec::new(); + for row in rows { + let listendate = row.try_get::, _>("listendate")? + .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); + let episodepubdate = row.try_get::, _>("episodepubdate")? + .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); + + episodes.push(serde_json::json!({ + "episodeid": row.get::, _>("episodeid"), + "listendate": listendate, + "listenduration": row.get::, _>("listenduration"), + "episodetitle": row.get::, _>("episodetitle"), + "episodedescription": row.get::, _>("episodedescription"), + "episodeartwork": row.get::, _>("episodeartwork"), + "episodeurl": row.get::, _>("episodeurl"), + "episodeduration": row.get::, _>("episodeduration"), + "podcastname": row.get::, _>("podcastname"), + "episodepubdate": episodepubdate, + "completed": row.get::, _>("completed"), + "saved": row.get::, _>("saved"), + "queued": row.get::, _>("queued"), + "downloaded": row.get::, _>("downloaded"), + "is_youtube": row.get::, _>("is_youtube") + })); } + Ok(episodes) } - Err(e) => { - tracing::error!("Error getting data from podpeople: {}", e); + DatabasePool::MySQL(pool) => { + let rows = sqlx::query( + "SELECT * FROM ( + SELECT + e.EpisodeID as episodeid, + ueh.ListenDate as listendate, + ueh.ListenDuration as listenduration, + e.EpisodeTitle as episodetitle, + e.EpisodeDescription as episodedescription, + CASE + WHEN p.UsePodcastCoversCustomized = 1 AND p.UsePodcastCovers = 1 THEN p.ArtworkURL + WHEN u.UsePodcastCovers = 1 THEN p.ArtworkURL + ELSE e.EpisodeArtwork + END as episodeartwork, 
+ e.EpisodeURL as episodeurl, + e.EpisodeDuration as episodeduration, + p.PodcastName as podcastname, + e.EpisodePubDate as episodepubdate, + e.Completed as completed, + CASE WHEN se.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN eq.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN de.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + 0 as is_youtube + FROM UserEpisodeHistory ueh + JOIN Episodes e ON ueh.EpisodeID = e.EpisodeID + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN Users u ON p.UserID = u.UserID + LEFT JOIN SavedEpisodes se ON e.EpisodeID = se.EpisodeID AND se.UserID = ? + LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = ? + LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID AND de.UserID = ? + WHERE ueh.UserID = ? + + UNION ALL + + SELECT + yv.VideoID as episodeid, + NULL as listendate, + yv.ListenPosition as listenduration, + yv.VideoTitle as episodetitle, + yv.VideoDescription as episodedescription, + CASE + WHEN p.UsePodcastCoversCustomized = 1 AND p.UsePodcastCovers = 1 THEN p.ArtworkURL + WHEN u.UsePodcastCovers = 1 THEN p.ArtworkURL + ELSE yv.ThumbnailURL + END as episodeartwork, + yv.VideoURL as episodeurl, + yv.Duration as episodeduration, + p.PodcastName as podcastname, + yv.PublishedAt as episodepubdate, + yv.Completed as completed, + CASE WHEN sv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN eq.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN dv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + 1 as is_youtube + FROM YouTubeVideos yv + JOIN Podcasts p ON yv.PodcastID = p.PodcastID + LEFT JOIN Users u ON p.UserID = u.UserID + LEFT JOIN SavedVideos sv ON yv.VideoID = sv.VideoID AND sv.UserID = ? + LEFT JOIN EpisodeQueue eq ON yv.VideoID = eq.EpisodeID AND eq.UserID = ? + LEFT JOIN DownloadedVideos dv ON yv.VideoID = dv.VideoID AND dv.UserID = ? + WHERE yv.ListenPosition > 0 + AND p.UserID = ? 
+ ) combined_results + ORDER BY listendate DESC" + ) + .bind(user_id) // SavedEpisodes join + .bind(user_id) // EpisodeQueue join + .bind(user_id) // DownloadedEpisodes join + .bind(user_id) // WHERE clause + .bind(user_id) // SavedVideos join + .bind(user_id) // EpisodeQueue join (YouTube) + .bind(user_id) // DownloadedVideos join + .bind(user_id) // WHERE clause (YouTube) + .fetch_all(pool) + .await?; + + let mut episodes = Vec::new(); + for row in rows { + let listendate = row.try_get::, _>("listendate")? + .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); + let episodepubdate = row.try_get::, _>("episodepubdate")? + .map(|dt| dt.format("%Y-%m-%dT%H:%M:%S").to_string()); + + episodes.push(serde_json::json!({ + "episodeid": row.get::, _>("episodeid"), + "listendate": listendate, + "listenduration": row.get::, _>("listenduration"), + "episodetitle": row.get::, _>("episodetitle"), + "episodedescription": row.get::, _>("episodedescription"), + "episodeartwork": row.get::, _>("episodeartwork"), + "episodeurl": row.get::, _>("episodeurl"), + "episodeduration": row.get::, _>("episodeduration"), + "podcastname": row.get::, _>("podcastname"), + "episodepubdate": episodepubdate, + "completed": row.get::, _>("completed"), + "saved": row.get::, _>("saved"), + "queued": row.get::, _>("queued"), + "downloaded": row.get::, _>("downloaded"), + "is_youtube": row.get::, _>("is_youtube") + })); + } + Ok(episodes) } } - - // 2. 
Get podcasts from podcast index - tracing::info!("API URL configured as: {}", api_url); - match client - .get(&api_url) - .query(&[ - ("query", person_name.as_str()), - ("index", "person"), - ("search_type", "person") - ]) - .send() - .await - { - Ok(response) => { - if let Ok(index_data) = response.json::().await { - if let Some(items) = index_data.get("items").and_then(|v| v.as_array()) { - for episode in items { - if let (Some(title), Some(feed_url), Some(feed_id)) = ( - episode.get("feedTitle").and_then(|v| v.as_str()), - episode.get("feedUrl").and_then(|v| v.as_str()), - episode.get("feedId").and_then(|v| v.as_i64()), - ) { - processed_shows.insert((title.to_string(), feed_url.to_string(), feed_id as i32)); - } - } - } + } + + // Increment listen time - matches Python increment_listen_time function exactly + pub async fn increment_listen_time(&self, user_id: i32) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "UserStats" SET TimeListened = TimeListened + 1 WHERE UserID = $1"#) + .bind(user_id) + .execute(pool) + .await?; + Ok(()) + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE UserStats SET TimeListened = TimeListened + 1 WHERE UserID = ?") + .bind(user_id) + .execute(pool) + .await?; + Ok(()) + } + } + } + + // Get playback speed - matches Python get_playback_speed function exactly + pub async fn get_playback_speed(&self, user_id: i32, _is_youtube: bool, podcast_id: Option) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let query = if let Some(_pod_id) = podcast_id { + r#"SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = $1"# + } else { + r#"SELECT PlaybackSpeed FROM "Users" WHERE UserID = $1"# + }; + + let param = podcast_id.unwrap_or(user_id); + let row = sqlx::query(query) + .bind(param) + .fetch_one(pool) + .await?; + + Ok(row.try_get::("PlaybackSpeed").unwrap_or(1.0)) + } + DatabasePool::MySQL(pool) => { + let query = if let Some(_pod_id) = podcast_id { + "SELECT 
PlaybackSpeed FROM Podcasts WHERE PodcastID = ?" + } else { + "SELECT PlaybackSpeed FROM Users WHERE UserID = ?" + }; + + let param = podcast_id.unwrap_or(user_id); + let row = sqlx::query(query) + .bind(param) + .fetch_one(pool) + .await?; + + if let Ok(speed) = row.try_get::("PlaybackSpeed") { + Ok(speed.to_f64().unwrap_or(1.0)) + } else { + Ok(1.0) } } - Err(e) => { - tracing::error!("Error getting data from podcast index: {}", e); + } + } + + // Add news feed if not already added - matches Python add_news_feed_if_not_added function exactly + + // Cleanup old episodes - matches Python cleanup_old_episodes function exactly + pub async fn cleanup_old_episodes(&self) -> AppResult<()> { + self.cleanup_old_people_episodes(30).await?; + self.cleanup_expired_shared_episodes().await?; + Ok(()) + } + + // Cleanup old people episodes - matches Python cleanup_old_people_episodes function exactly + pub async fn cleanup_old_people_episodes(&self, days: i32) -> AppResult<()> { + let cutoff_date = chrono::Utc::now() - chrono::Duration::days(days as i64); + + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"DELETE FROM "PeopleEpisodes" WHERE addeddate < $1"#) + .bind(cutoff_date) + .execute(pool) + .await?; + + tracing::info!("Cleaned up {} old PeopleEpisodes records older than {} days", result.rows_affected(), days); + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("DELETE FROM PeopleEpisodes WHERE AddedDate < ?") + .bind(cutoff_date) + .execute(pool) + .await?; + + tracing::info!("Cleaned up {} old PeopleEpisodes records older than {} days", result.rows_affected(), days); + } + } + Ok(()) + } + + // Cleanup expired shared episodes - matches Python cleanup_expired_shared_episodes function exactly + pub async fn cleanup_expired_shared_episodes(&self) -> AppResult<()> { + let now = chrono::Utc::now(); + + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"DELETE FROM "SharedEpisodes" WHERE expirationdate < 
$1"#) + .bind(now) + .execute(pool) + .await?; + + tracing::info!("Cleaned up {} expired SharedEpisodes records", result.rows_affected()); + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("DELETE FROM SharedEpisodes WHERE ExpirationDate < ?") + .bind(now) + .execute(pool) + .await?; + + tracing::info!("Cleaned up {} expired SharedEpisodes records", result.rows_affected()); + } + } + Ok(()) + } + + // Update all playlists - matches Python update_all_playlists function exactly + pub async fn update_all_playlists(&self) -> AppResult<()> { + tracing::info!("=================== PLAYLIST UPDATE STARTING ==================="); + tracing::info!("Starting to fetch all playlists"); + + match self { + DatabasePool::Postgres(pool) => { + let playlists = sqlx::query(r#" + SELECT playlistid, name, userid, podcastids, includeunplayed, + includepartiallyplayed, includeplayed, playprogressmin, playprogressmax, + timefilterhours, minduration, maxduration, sortorder, + groupbypodcast, maxepisodes + FROM "Playlists" + "#) + .fetch_all(pool) + .await?; + + tracing::info!("Found {} playlists to update", playlists.len()); + + for playlist in playlists { + let playlist_id: i32 = playlist.try_get("playlistid")?; + let playlist_name: String = playlist.try_get("name")?; + let user_id: i32 = playlist.try_get("userid")?; + + tracing::info!("Updating playlist: {} (ID: {}, User: {})", playlist_name, playlist_id, user_id); + + match self.update_playlist_contents(playlist_id).await { + Ok(episode_count) => { + tracing::info!("Successfully updated playlist '{}': {} episodes", playlist_name, episode_count); + } + Err(e) => { + tracing::error!("Failed to update playlist '{}' (ID: {}): {}", playlist_name, playlist_id, e); + // Continue with other playlists + } + } + } + } + DatabasePool::MySQL(pool) => { + let playlists = sqlx::query(" + SELECT PlaylistID, Name, UserID, PodcastIds, IncludeUnplayed, + IncludePartiallyPlayed, IncludePlayed, PlayProgressMin, PlayProgressMax, + 
TimeFilterHours, MinDuration, MaxDuration, SortOrder, + GroupByPodcast, MaxEpisodes + FROM Playlists + ") + .fetch_all(pool) + .await?; + + tracing::info!("Found {} playlists to update", playlists.len()); + + for playlist in playlists { + let playlist_id: i32 = playlist.try_get("PlaylistID")?; + let playlist_name: String = playlist.try_get("Name")?; + let user_id: i32 = playlist.try_get("UserID")?; + + tracing::info!("Updating playlist: {} (ID: {}, User: {})", playlist_name, playlist_id, user_id); + + match self.update_playlist_contents(playlist_id).await { + Ok(episode_count) => { + tracing::info!("Successfully updated playlist '{}': {} episodes", playlist_name, episode_count); + } + Err(e) => { + tracing::error!("Failed to update playlist '{}' (ID: {}): {}", playlist_name, playlist_id, e); + // Continue with other playlists + } + } + } + } + } + + tracing::info!("=================== PLAYLIST UPDATE COMPLETED ==================="); + Ok(()) + } + + // Update playlist contents - matches Python update_playlist_contents function exactly + pub async fn update_playlist_contents(&self, playlist_id: i32) -> AppResult { + tracing::info!("======= UPDATE PLAYLIST ID: {} =======", playlist_id); + + match self { + DatabasePool::Postgres(pool) => { + // Get playlist configuration first + let playlist = sqlx::query(r#" + SELECT playlistid, name, userid, podcastids, includeunplayed, + includepartiallyplayed, includeplayed, playprogressmin, playprogressmax, + timefilterhours, minduration, maxduration, sortorder, + groupbypodcast, maxepisodes, issystemplaylist + FROM "Playlists" WHERE playlistid = $1 + "#) + .bind(playlist_id) + .fetch_one(pool) + .await?; + + // Clear existing contents + sqlx::query(r#"DELETE FROM "PlaylistContents" WHERE playlistid = $1"#) + .bind(playlist_id) + .execute(pool) + .await?; + + // Handle special playlists + let playlist_name: String = playlist.try_get("name")?; + let is_system_playlist: bool = playlist.try_get("issystemplaylist").unwrap_or(false); 
+ + let episode_count = if playlist_name == "Fresh Releases" && is_system_playlist { + // Special handling for Fresh Releases + self.update_fresh_releases_playlist_postgres(pool, playlist_id).await? + } else if playlist_name == "Currently Listening" && is_system_playlist { + // Special handling for Currently Listening + self.update_currently_listening_playlist_postgres(pool, playlist_id).await? + } else if playlist_name == "Almost Done" && is_system_playlist { + // Special handling for Almost Done + self.update_almost_done_playlist_postgres(pool, playlist_id).await? + } else if playlist_name == "Quick Listens" && is_system_playlist { + // Special handling for Quick Listens + self.update_quick_listens_playlist_postgres(pool, playlist_id).await? + } else { + // Standard playlist query building + self.build_and_execute_playlist_query_postgres(pool, &playlist).await? + }; + + // Update timestamp + sqlx::query(r#"UPDATE "Playlists" SET lastupdated = CURRENT_TIMESTAMP WHERE playlistid = $1"#) + .bind(playlist_id) + .execute(pool) + .await?; + + Ok(episode_count) + } + DatabasePool::MySQL(pool) => { + // Get playlist configuration first + let playlist = sqlx::query(" + SELECT PlaylistID, Name, UserID, PodcastIDs, IncludeUnplayed, + IncludePartiallyPlayed, IncludePlayed, PlayProgressMin, PlayProgressMax, + TimeFilterHours, MinDuration, MaxDuration, SortOrder, + GroupByPodcast, MaxEpisodes, IsSystemPlaylist + FROM Playlists WHERE PlaylistID = ? 
+ ") + .bind(playlist_id) + .fetch_one(pool) + .await?; + + // Clear existing contents + sqlx::query("DELETE FROM PlaylistContents WHERE PlaylistID = ?") + .bind(playlist_id) + .execute(pool) + .await?; + + // Handle special playlists + let playlist_name: String = playlist.try_get("Name")?; + let is_system_playlist: bool = playlist.try_get("IsSystemPlaylist").unwrap_or(false); + + let episode_count = if playlist_name == "Fresh Releases" && is_system_playlist { + // Special handling for Fresh Releases + self.update_fresh_releases_playlist_mysql(pool, playlist_id).await? + } else if playlist_name == "Currently Listening" && is_system_playlist { + // Special handling for Currently Listening + self.update_currently_listening_playlist_mysql(pool, playlist_id).await? + } else if playlist_name == "Almost Done" && is_system_playlist { + // Special handling for Almost Done + self.update_almost_done_playlist_mysql(pool, playlist_id).await? + } else if playlist_name == "Quick Listens" && is_system_playlist { + // Special handling for Quick Listens + self.update_quick_listens_playlist_mysql(pool, playlist_id).await? + } else { + // Standard playlist query building + self.build_and_execute_playlist_query_mysql(pool, &playlist).await? 
+ }; + + // Update timestamp + sqlx::query("UPDATE Playlists SET LastUpdated = CURRENT_TIMESTAMP WHERE PlaylistID = ?") + .bind(playlist_id) + .execute(pool) + .await?; + + Ok(episode_count) + } + } + } + + // Get all people/hosts for refresh_hosts endpoint - matches Python refresh_all_hosts function + pub async fn get_all_people_for_refresh(&self) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let people = sqlx::query(r#" + SELECT DISTINCT p.personid, p.name, p.userid + FROM "People" p + "#) + .fetch_all(pool) + .await?; + + let mut result = Vec::new(); + for person in people { + let person_id: i32 = person.try_get("personid")?; + let name: String = person.try_get("name")?; + let user_id: i32 = person.try_get("userid")?; + result.push((person_id, name, user_id)); + } + Ok(result) + } + DatabasePool::MySQL(pool) => { + let people = sqlx::query(" + SELECT DISTINCT p.PersonID, p.Name, p.UserID + FROM People p + ") + .fetch_all(pool) + .await?; + + let mut result = Vec::new(); + for person in people { + let person_id: i32 = person.try_get("PersonID")?; + let name: String = person.try_get("Name")?; + let user_id: i32 = person.try_get("UserID")?; + result.push((person_id, name, user_id)); + } + Ok(result) + } + } + } + + // Process person subscription - matches Python process_person_subscription function exactly + pub async fn process_person_subscription(&self, user_id: i32, person_id: i32, person_name: String) -> AppResult<()> { + use std::collections::HashSet; + + tracing::info!("Starting refresh for host: {} (ID: {})", person_name, person_id); + + let mut processed_shows: HashSet<(String, String, i32)> = HashSet::new(); + let people_url = std::env::var("PEOPLE_API_URL").unwrap_or_else(|_| "https://podpeople.pinepods.online".to_string()); + let api_url = std::env::var("SEARCH_API_URL").unwrap_or_else(|_| "https://api.pinepods.online/api/search".to_string()); + + // 1. 
Get podcasts from podpeople + let client = reqwest::Client::builder() + .timeout(std::time::Duration::from_secs(30)) + .build() + .map_err(|e| AppError::internal(&format!("Failed to create HTTP client: {}", e)))?; + + match client + .get(&format!("{}/api/hostsearch", people_url)) + .query(&[("name", &person_name)]) + .send() + .await + { + Ok(response) => { + if let Ok(podpeople_data) = response.json::().await { + if podpeople_data.get("success").and_then(|v| v.as_bool()).unwrap_or(false) { + if let Some(podcasts) = podpeople_data.get("podcasts").and_then(|v| v.as_array()) { + for podcast in podcasts { + if let (Some(title), Some(feed_url), Some(id)) = ( + podcast.get("title").and_then(|v| v.as_str()), + podcast.get("feed_url").and_then(|v| v.as_str()), + podcast.get("id").and_then(|v| v.as_i64()), + ) { + processed_shows.insert((title.to_string(), feed_url.to_string(), id as i32)); + } + } + } + } + } + } + Err(e) => { + tracing::error!("Error getting data from podpeople: {}", e); + } + } + + // 2. Get podcasts from podcast index + tracing::info!("API URL configured as: {}", api_url); + match client + .get(&api_url) + .query(&[ + ("query", person_name.as_str()), + ("index", "person"), + ("search_type", "person") + ]) + .send() + .await + { + Ok(response) => { + if let Ok(index_data) = response.json::().await { + if let Some(items) = index_data.get("items").and_then(|v| v.as_array()) { + for episode in items { + if let (Some(title), Some(feed_url), Some(feed_id)) = ( + episode.get("feedTitle").and_then(|v| v.as_str()), + episode.get("feedUrl").and_then(|v| v.as_str()), + episode.get("feedId").and_then(|v| v.as_i64()), + ) { + processed_shows.insert((title.to_string(), feed_url.to_string(), feed_id as i32)); + } + } + } + } + } + Err(e) => { + tracing::error!("Error getting data from podcast index: {}", e); + } + } + + if processed_shows.is_empty() { + tracing::info!("No shows found for person: {}", person_name); + return Ok(()); + } + + // 3. 
Process each unique show + for (title, feed_url, feed_id) in processed_shows { + match self.process_person_show(user_id, person_id, &title, &feed_url, feed_id).await { + Ok(_) => { + tracing::info!("Successfully processed show: {}", title); + } + Err(e) => { + tracing::error!("Error processing show {}: {}", title, e); + continue; + } + } + } + + Ok(()) + } + + // Helper function to process individual show for person - matches Python logic + async fn process_person_show(&self, user_id: i32, person_id: i32, title: &str, feed_url: &str, _feed_id: i32) -> AppResult<()> { + // First check if podcast exists for user + let user_podcast_id = self.get_podcast_id_by_feed_url(user_id, feed_url).await?; + + let podcast_id = if user_podcast_id.is_none() { + // Check if system podcast exists (UserID = 1) + let system_podcast_id = self.get_podcast_id_by_feed_url(1, feed_url).await?; + + if system_podcast_id.is_none() { + // Add as new system podcast + tracing::info!("Creating system podcast for feed: {}", feed_url); + let podcast_values = self.get_podcast_values_for_person(feed_url).await?; + let add_result = self.add_person_podcast_from_values(&podcast_values, 1).await?; + tracing::info!("Add podcast result: {}", add_result); + + // Get the podcast ID after adding + tracing::info!("Looking for podcast with UserID=1 and FeedURL='{}'", feed_url); + match self.get_podcast_id_by_feed_url(1, feed_url).await? 
{ + Some(id) => { + tracing::info!("Successfully created system podcast with ID: {}", id); + id + } + None => { + // Let's debug by listing all podcasts for UserID=1 + tracing::error!("Failed to get podcast ID after adding system podcast for feed: {}", feed_url); + + // Debug: List all system podcasts to see what's there + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#"SELECT podcastid, podcastname, feedurl FROM "Podcasts" WHERE userid = $1"#) + .bind(1) + .fetch_all(pool) + .await?; + + tracing::error!("System podcasts (UserID=1):"); + for row in rows { + let id: i32 = row.try_get("podcastid")?; + let name: String = row.try_get("podcastname")?; + let url: String = row.try_get("feedurl")?; + tracing::error!(" ID: {}, Name: '{}', URL: '{}'", id, name, url); + } + } + DatabasePool::MySQL(pool) => { + let rows = sqlx::query("SELECT PodcastID, PodcastName, FeedURL FROM Podcasts WHERE UserID = ?") + .bind(1) + .fetch_all(pool) + .await?; + + tracing::error!("System podcasts (UserID=1):"); + for row in rows { + let id: i32 = row.try_get("PodcastID")?; + let name: String = row.try_get("PodcastName")?; + let url: String = row.try_get("FeedURL")?; + tracing::error!(" ID: {}, Name: '{}', URL: '{}'", id, name, url); + } + } + } + + return Err(AppError::internal("Failed to create system podcast")); + } + } + } else { + system_podcast_id.unwrap() + } + } else { + user_podcast_id.unwrap() + }; + + tracing::info!("Using podcast: ID={}, Title={}", podcast_id, title); + + // Add episodes to PeopleEpisodes + self.add_people_episodes(person_id, podcast_id, feed_url).await?; + + Ok(()) + } + + // Add people episodes - matches Python add_people_episodes function exactly + pub async fn add_people_episodes(&self, person_id: i32, podcast_id: i32, feed_url: &str) -> AppResult<()> { + // Validate that we have a valid podcast ID + if podcast_id <= 0 { + return Err(AppError::internal(&format!("Invalid podcast ID {} for person episodes", podcast_id))); + } + + // 
Use the same robust feed fetching and parsing as add_episodes + let content = self.try_fetch_feed(feed_url, None, None).await?; + let episodes = self.parse_rss_feed(&content, podcast_id, "").await?; + + println!("Parsed {} episodes from feed for person {} with podcast ID {}", episodes.len(), person_id, podcast_id); + + let mut added_count = 0; + + for episode in episodes { + // Check if episode already exists + let episode_exists = match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + SELECT episodeid FROM "PeopleEpisodes" + WHERE personid = $1 AND podcastid = $2 AND episodeurl = $3 + "#) + .bind(person_id) + .bind(podcast_id) + .bind(&episode.url) + .fetch_optional(pool) + .await?; + + result.is_some() + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query(" + SELECT EpisodeID FROM PeopleEpisodes + WHERE PersonID = ? AND PodcastID = ? AND EpisodeURL = ? + ") + .bind(person_id) + .bind(podcast_id) + .bind(&episode.url) + .fetch_optional(pool) + .await?; + + result.is_some() + } + }; + + if episode_exists { + continue; + } + + // Insert new episode + match self { + DatabasePool::Postgres(pool) => { + // PostgreSQL expects timestamp type, not string + let naive_datetime = episode.pub_date.naive_utc(); + sqlx::query(r#" + INSERT INTO "PeopleEpisodes" + (personid, podcastid, episodetitle, episodedescription, + episodeurl, episodeartwork, episodepubdate, episodeduration) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + "#) + .bind(person_id) + .bind(podcast_id) + .bind(&episode.title) + .bind(&episode.description) + .bind(&episode.url) + .bind(&episode.artwork_url) + .bind(naive_datetime) + .bind(episode.duration as i32) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + // MySQL accepts string format + let pub_date_str = episode.pub_date.format("%Y-%m-%d %H:%M:%S").to_string(); + sqlx::query(" + INSERT INTO PeopleEpisodes + (PersonID, PodcastID, EpisodeTitle, EpisodeDescription, + EpisodeURL, EpisodeArtwork, 
EpisodePubDate, EpisodeDuration)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ")
                .bind(person_id)
                .bind(podcast_id)
                .bind(&episode.title)
                .bind(&episode.description)
                .bind(&episode.url)
                .bind(&episode.artwork_url)
                .bind(&pub_date_str)
                .bind(episode.duration as i32)
                .execute(pool)
                .await?;
            }
        }

        added_count += 1;
    }

    println!("Successfully added {} new episodes for person {} from podcast {}", added_count, person_id, podcast_id);
    Ok(())
}

/// Parse a human-readable duration string into whole seconds.
///
/// Accepts three forms, mirroring common RSS `<itunes:duration>` values:
/// - "MM:SS"      -> minutes * 60 + seconds
/// - "HH:MM:SS"   -> hours * 3600 + minutes * 60 + seconds
/// - plain digits -> taken as a raw seconds count
///
/// Returns `None` when any component fails to parse or the colon-separated
/// form has a part count other than 2 or 3.
/// NOTE(review): the element type generic was lost in transit; reconstructed
/// as `i64` from the `let minutes: i64` bindings inside — confirm upstream.
fn parse_duration(&self, duration_str: &str) -> Option<i64> {
    if duration_str.contains(':') {
        let parts: Vec<&str> = duration_str.split(':').collect();
        match parts.len() {
            2 => {
                // MM:SS
                let minutes: i64 = parts[0].parse().ok()?;
                let seconds: i64 = parts[1].parse().ok()?;
                Some(minutes * 60 + seconds)
            }
            3 => {
                // HH:MM:SS
                let hours: i64 = parts[0].parse().ok()?;
                let minutes: i64 = parts[1].parse().ok()?;
                let seconds: i64 = parts[2].parse().ok()?;
                Some(hours * 3600 + minutes * 60 + seconds)
            }
            _ => None,
        }
    } else if let Ok(duration) = duration_str.parse::<i64>() {
        Some(duration)
    } else {
        None
    }
}

/// Add (or find) a podcast for `user_id` from a pre-fetched values map -
/// matches Python add_person_podcast function exactly.
///
/// Expects the same key mapping as `add_podcast_from_values`:
/// "podcastname", "feedurl", "artworkurl", "description". Missing keys
/// default to empty strings. Returns `Ok(true)` both when the podcast
/// already exists (non-empty FeedURL match for this user) and after a
/// successful insert.
/// NOTE(review): generics reconstructed as `HashMap<String, String>` /
/// `AppResult<bool>` from the `.cloned()` string usage and `Ok(true)`
/// returns — confirm against original source.
pub async fn add_person_podcast_from_values(&self, podcast_values: &std::collections::HashMap<String, String>, user_id: i32) -> AppResult<bool> {
    // Use the same key mapping as add_podcast_from_values
    let pod_title = podcast_values.get("podcastname").cloned().unwrap_or_default();
    let pod_feed_url = podcast_values.get("feedurl").cloned().unwrap_or_default();
    let pod_artwork = podcast_values.get("artworkurl").cloned().unwrap_or_default();
    let pod_description = podcast_values.get("description").cloned().unwrap_or_default();
    // First check if podcast already exists for user with a valid feed URL
    match self {
        DatabasePool::Postgres(pool) => {
            let existing = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2 AND feedurl != ''"#)
.bind(&pod_feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if existing.is_some() { + return Ok(true); // Already exists + } + + // Insert new podcast + sqlx::query(r#" + INSERT INTO "Podcasts" (podcastname, feedurl, artworkurl, description, + userid, autodownload, isyoutubechannel, podcastindexid) + VALUES ($1, $2, $3, $4, $5, FALSE, FALSE, $6) + "#) + .bind(&pod_title) + .bind(&pod_feed_url) + .bind(&pod_artwork) + .bind(&pod_description) + .bind(user_id) + .bind(0) // podcast_index_id placeholder + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + let existing = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ? AND FeedURL != ''") + .bind(&pod_feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if existing.is_some() { + return Ok(true); // Already exists + } + + // Insert new podcast + sqlx::query(" + INSERT INTO Podcasts (PodcastName, FeedURL, ArtworkURL, Description, + UserID, AutoDownload, IsYouTubeChannel, PodcastIndexID) + VALUES (?, ?, ?, ?, ?, 0, 0, ?) + ") + .bind(&pod_title) + .bind(&pod_feed_url) + .bind(&pod_artwork) + .bind(&pod_description) + .bind(user_id) + .bind(0) // podcast_index_id placeholder + .execute(pool) + .await?; + } + } + + Ok(true) + } + + // Get podcast ID by feed URL + pub async fn get_podcast_id_by_feed_url(&self, user_id: i32, feed_url: &str) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + Ok(result.map(|row| row.try_get("podcastid")).transpose()?) + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?") + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + Ok(result.map(|row| row.try_get("PodcastID")).transpose()?) 
+ } + } + } + + // Get podcast values for person podcasts - uses existing get_podcast_values function + pub async fn get_podcast_values_for_person(&self, feed_url: &str) -> AppResult> { + self.get_podcast_values(feed_url, 1, None, None).await + } + + // Helper function to normalize timezone names for database compatibility + fn normalize_timezone(tz_str: &str) -> String { + // Try to parse the timezone string with chrono-tz + if let Ok(tz) = tz_str.parse::() { + // Return the canonical name for this timezone + tz.name().to_string() + } else { + // If parsing fails, return UTC as fallback + tracing::warn!("Unable to parse timezone '{}', falling back to UTC", tz_str); + "UTC".to_string() + } + } + + // COMPLETE PLAYLIST SYSTEM IMPLEMENTATION - matches Python functionality exactly + + // Update Fresh Releases playlist with timezone-aware logic - matches Python update_fresh_releases_playlist + async fn update_fresh_releases_playlist_postgres(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Fresh Releases playlist with timezone logic"); + + // Get all users with their timezones + let users = sqlx::query(r#"SELECT userid, timezone FROM "Users""#) + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("userid")?; + let timezone: Option = user.try_get("timezone").ok(); + let raw_tz = timezone.as_deref() + .filter(|s| !s.is_empty()) // Filter out empty strings + .unwrap_or("UTC"); + + let normalized_tz = Self::normalize_timezone(raw_tz); + + tracing::info!("Processing Fresh Releases for user {} with timezone {} (normalized: {})", user_id, raw_tz, normalized_tz); + + // Get episodes from last 24 hours in user's timezone + let episodes = sqlx::query(r#" + SELECT e.episodeid + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + WHERE e.episodepubdate AT TIME ZONE 'UTC' AT TIME ZONE $1 
> + (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' AT TIME ZONE $1 - INTERVAL '24 hours') + ORDER BY e.episodepubdate DESC + "#) + .bind(&normalized_tz) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("episodeid")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(r#" + INSERT INTO "PlaylistContents" (playlistid, episodeid, position) + VALUES ($1, $2, $3) + "#) + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Fresh Releases playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + // Update Currently Listening playlist - episodes that users have started but not finished + async fn update_currently_listening_playlist_postgres(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Currently Listening playlist"); + + // Get all users and their currently listening episodes + let users = sqlx::query(r#"SELECT userid FROM "Users""#) + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("userid")?; + + tracing::info!("Processing Currently Listening for user {}", user_id); + + // Get episodes user has started but not finished + let episodes = sqlx::query(r#" + SELECT e.episodeid + FROM "Episodes" e + JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid + WHERE h.userid = $1 + AND h.listenduration > 0 + AND h.listenduration < e.episodeduration + ORDER BY h.listendate DESC + "#) + .bind(user_id) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("episodeid")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(r#"INSERT INTO "PlaylistContents" 
(playlistid, episodeid, position) VALUES ($1, $2, $3)"#) + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Currently Listening playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + // Update Almost Done playlist - episodes that users are 75%+ through + async fn update_almost_done_playlist_postgres(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Almost Done playlist"); + + // Get all users and their almost done episodes + let users = sqlx::query(r#"SELECT userid FROM "Users""#) + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("userid")?; + + tracing::info!("Processing Almost Done for user {}", user_id); + + // Get episodes user is 75%+ through but not completed + let episodes = sqlx::query(r#" + SELECT e.episodeid + FROM "Episodes" e + JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid + WHERE h.userid = $1 + AND h.listenduration > 0 + AND h.listenduration < e.episodeduration + AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= 0.75 + ORDER BY h.listendate DESC + "#) + .bind(user_id) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("episodeid")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(r#"INSERT INTO "PlaylistContents" (playlistid, episodeid, position) VALUES ($1, $2, $3)"#) + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Almost Done playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + // Update Quick Listens playlist - 
episodes under 15 minutes from ALL users' podcasts
    /// Update the system "Quick Listens" playlist (PostgreSQL backend).
    ///
    /// Fills the playlist with up to 1000 of the shortest not-completed
    /// episodes whose duration is between 1 and 900 seconds, drawn from
    /// every podcast in the instance, ordered shortest-first. Positions are
    /// assigned sequentially from 1. Returns the episode count inserted.
    /// NOTE(review): pool/return generics reconstructed as `Pool<Postgres>`
    /// / `AppResult<i32>` from the sibling MySQL variant — confirm upstream.
    async fn update_quick_listens_playlist_postgres(&self, pool: &Pool<Postgres>, playlist_id: i32) -> AppResult<i32> {
        tracing::info!("Updating Quick Listens playlist");

        // Get shortest 1000 episodes under 15 minutes (900 seconds) and over 1 second from ALL podcasts
        let episodes = sqlx::query(r#"
            SELECT e.episodeid
            FROM "Episodes" e
            JOIN "Podcasts" p ON e.podcastid = p.podcastid
            WHERE e.episodeduration >= 1
            AND e.episodeduration <= 900
            AND e.completed = FALSE
            ORDER BY e.episodeduration ASC
            LIMIT 1000
        "#)
        .fetch_all(pool)
        .await?;

        let mut position = 1;

        // Add episodes to playlist
        for episode in episodes {
            let episode_id: i32 = episode.try_get("episodeid")?;
            sqlx::query(r#"INSERT INTO "PlaylistContents" (playlistid, episodeid, position) VALUES ($1, $2, $3)"#)
                .bind(playlist_id)
                .bind(episode_id)
                .bind(position)
                .execute(pool)
                .await?;

            position += 1;
        }

        let episode_count = position - 1;
        tracing::info!("Quick Listens playlist updated with {} episodes", episode_count);
        Ok(episode_count)
    }

    /// Update the system "Quick Listens" playlist (MySQL backend).
    ///
    /// Same contract as the PostgreSQL variant: up to 1000 shortest
    /// not-completed episodes with 1 <= duration <= 900 seconds, from all
    /// podcasts, positions assigned from 1.
    /// NOTE(review): pool generic reconstructed as `Pool<MySql>` — confirm
    /// against the crate imports.
    async fn update_quick_listens_playlist_mysql(&self, pool: &Pool<MySql>, playlist_id: i32) -> AppResult<i32> {
        tracing::info!("Updating Quick Listens playlist");

        // Get shortest 1000 episodes under 15 minutes (900 seconds) and over 1 second from ALL podcasts
        let episodes = sqlx::query("
            SELECT e.EpisodeID
            FROM Episodes e
            JOIN Podcasts p ON e.PodcastID = p.PodcastID
            WHERE e.EpisodeDuration >= 1
            AND e.EpisodeDuration <= 900
            AND e.Completed = FALSE
            ORDER BY e.EpisodeDuration ASC
            LIMIT 1000
        ")
        .fetch_all(pool)
        .await?;

        let mut position = 1;

        // Add episodes to playlist
        for episode in episodes {
            let episode_id: i32 = episode.try_get("EpisodeID")?;
            sqlx::query("INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) VALUES (?, ?, ?)")
                .bind(playlist_id)
                .bind(episode_id)
                .bind(position)
                .execute(pool)
                .await?;

            position += 1;
        }

let episode_count = position - 1; + tracing::info!("Quick Listens playlist updated with {} episodes", episode_count); + Ok(episode_count) + } + + async fn update_fresh_releases_playlist_mysql(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Fresh Releases playlist with timezone logic"); + + // Get all users with their timezones + let users = sqlx::query("SELECT UserID, TimeZone FROM Users") + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("UserID")?; + let timezone: Option = user.try_get("TimeZone").ok(); + let raw_tz = timezone.as_deref() + .filter(|s| !s.is_empty()) // Filter out empty strings + .unwrap_or("UTC"); + + let normalized_tz = Self::normalize_timezone(raw_tz); + + tracing::info!("Processing Fresh Releases for user {} with timezone {} (normalized: {})", user_id, raw_tz, normalized_tz); + + // Get episodes from last 24 hours in user's timezone + let episodes = sqlx::query(" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + WHERE CONVERT_TZ(e.EpisodePubDate, 'UTC', ?) > + DATE_SUB(CONVERT_TZ(NOW(), 'UTC', ?), INTERVAL 24 HOUR) + ORDER BY e.EpisodePubDate DESC + ") + .bind(&normalized_tz) + .bind(&normalized_tz) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("EpisodeID")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(" + INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) + VALUES (?, ?, ?) 
+ ") + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Fresh Releases playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + async fn update_currently_listening_playlist_mysql(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Currently Listening playlist"); + + // Clear existing playlist contents + sqlx::query("DELETE FROM PlaylistContents WHERE PlaylistID = ?") + .bind(playlist_id) + .execute(pool) + .await?; + + // Get all users + let users = sqlx::query("SELECT UserID FROM Users") + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("UserID")?; + + tracing::info!("Processing Currently Listening for user {}", user_id); + + // Get episodes that are currently being listened to (started but not completed) + let episodes = sqlx::query(" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + WHERE h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration + ORDER BY h.ListenDate DESC + ") + .bind(user_id) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("EpisodeID")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(" + INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) + VALUES (?, ?, ?) 
+ ") + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Currently Listening playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + async fn update_almost_done_playlist_mysql(&self, pool: &Pool, playlist_id: i32) -> AppResult { + tracing::info!("Updating Almost Done playlist"); + + // Clear existing playlist contents + sqlx::query("DELETE FROM PlaylistContents WHERE PlaylistID = ?") + .bind(playlist_id) + .execute(pool) + .await?; + + // Get all users + let users = sqlx::query("SELECT UserID FROM Users") + .fetch_all(pool) + .await?; + + let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); + let mut position = 1; + + for user in users { + let user_id: i32 = user.try_get("UserID")?; + + tracing::info!("Processing Almost Done for user {}", user_id); + + // Get episodes that are almost done (75%+ listened and not completed) + let episodes = sqlx::query(" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + WHERE h.ListenDuration > 0 + AND h.ListenDuration < e.EpisodeDuration + AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) >= 0.75 + ORDER BY h.ListenDate DESC + ") + .bind(user_id) + .fetch_all(pool) + .await?; + + // Add unique episodes to playlist + for episode in episodes { + let episode_id: i32 = episode.try_get("EpisodeID")?; + if !added_episodes.contains(&episode_id) { + sqlx::query(" + INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) + VALUES (?, ?, ?) 
+ ") + .bind(playlist_id) + .bind(episode_id) + .bind(position) + .execute(pool) + .await?; + + added_episodes.insert(episode_id); + position += 1; + } + } + } + + tracing::info!("Almost Done playlist updated with {} episodes", added_episodes.len()); + Ok(added_episodes.len() as i32) + } + + // Build and execute playlist query for PostgreSQL - matches Python build_playlist_query exactly + async fn build_and_execute_playlist_query_postgres(&self, pool: &Pool, playlist: &sqlx::postgres::PgRow) -> AppResult { + let playlist_id: i32 = playlist.try_get("playlistid")?; + let user_id: i32 = playlist.try_get("userid")?; + let playlist_name: String = playlist.try_get("name")?; + let is_system_playlist: bool = playlist.try_get("issystemplaylist").unwrap_or(false); + + // Parse playlist configuration + let config = PlaylistConfig::from_postgres_row(playlist)?; + + // Determine if this playlist needs user history filtering + let needs_user_history = playlist_name == "Currently Listening" || + playlist_name == "Almost Done" || + !is_system_playlist; + + // Check for special optimized queries for partially played + if config.include_partially_played && !config.include_unplayed && !config.include_played { + return self.execute_partially_played_query_postgres(pool, playlist_id, user_id, &config).await; + } + + // Build the appropriate base query - FIXED PARAMETER INDEXING + let (base_query, params) = if is_system_playlist { + if needs_user_history { + // System playlist with user history filtering + (r#" + SELECT e.episodeid, p.podcastid, u.timezone + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $2 + JOIN "Users" u ON u.userid = $3 + WHERE 1=1 + "#.to_string(), vec![user_id, user_id]) + } else { + // System playlist without user history filtering (Fresh Releases, etc.) 
+ // But still need user context for history when needed + (r#" + SELECT e.episodeid, p.podcastid, u.timezone + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $2 + JOIN "Users" u ON u.userid = $3 + WHERE 1=1 + "#.to_string(), vec![user_id, user_id]) + } + } else { + // User-specific playlist + (r#" + SELECT e.episodeid, p.podcastid, u.timezone + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $2 + JOIN "Users" u ON u.userid = $3 + WHERE p.userid = $4 + "#.to_string(), vec![user_id, user_id, user_id]) + }; + + // Build the complete query with all filters + let (complete_query, all_params) = self.build_complete_postgres_query( + base_query, params, &config, playlist_id + )?; + + // Execute the query and insert episodes + self.execute_playlist_query_postgres(pool, &complete_query, &all_params, playlist_id).await + } + + // Build and execute playlist query for MySQL - matches Python build_playlist_query exactly + async fn build_and_execute_playlist_query_mysql(&self, pool: &Pool, playlist: &sqlx::mysql::MySqlRow) -> AppResult { + let playlist_id: i32 = playlist.try_get("PlaylistID")?; + let user_id: i32 = playlist.try_get("UserID")?; + let playlist_name: String = playlist.try_get("Name")?; + let is_system_playlist: bool = playlist.try_get("IsSystemPlaylist").unwrap_or(false); + + // Parse playlist configuration + let config = PlaylistConfig::from_mysql_row(playlist)?; + + // Determine if this playlist needs user history filtering + let needs_user_history = playlist_name == "Currently Listening" || + playlist_name == "Almost Done" || + !is_system_playlist; + + // Check for special optimized queries for partially played + if config.include_partially_played && !config.include_unplayed && !config.include_played { + return self.execute_partially_played_query_mysql(pool, 
playlist_id, user_id, &config).await; + } + + // Build the appropriate base query + let (base_query, params) = if is_system_playlist { + if needs_user_history { + // System playlist with user history filtering + (" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + JOIN Users u ON u.UserID = ? + WHERE 1=1 + ".to_string(), vec![user_id, user_id]) + } else { + // System playlist without user history filtering (Fresh Releases, etc.) + // But still need user context for history when needed + (" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + JOIN Users u ON u.UserID = ? + WHERE 1=1 + ".to_string(), vec![user_id, user_id]) + } + } else { + // User-specific playlist + (" + SELECT e.EpisodeID + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + JOIN Users u ON u.UserID = ? + WHERE p.UserID = ? 
+ ".to_string(), vec![user_id, user_id, user_id]) + }; + + // Build the complete query with all filters + let (complete_query, all_params) = self.build_complete_mysql_query( + base_query, params, &config, playlist_id + )?; + + // Execute the query and insert episodes + self.execute_playlist_query_mysql(pool, &complete_query, &all_params, playlist_id).await + } + + // Execute optimized partially played query for PostgreSQL - FIXED VERSION + async fn execute_partially_played_query_postgres(&self, pool: &Pool, playlist_id: i32, user_id: i32, config: &PlaylistConfig) -> AppResult { + // Use direct INSERT without subquery for this optimized case - no alias scoping issues + let sort_order = config.get_postgres_sort_order().replace("ORDER BY ", ""); + + // Build base query + let mut query = format!(r#" + INSERT INTO "PlaylistContents" (playlistid, episodeid, position) + SELECT $1, e.episodeid, ROW_NUMBER() OVER (ORDER BY {}) as position + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid + JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid + WHERE h.listenduration > 0 + AND h.listenduration < e.episodeduration + AND e.completed = FALSE + AND e.episodeduration > 0 + AND h.userid = $2 + "#, sort_order); + + let params = vec![playlist_id, user_id]; + + // Add progress filters using hardcoded values instead of parameters to avoid type issues + if let Some(min_progress) = config.play_progress_min { + query.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {}", min_progress / 100.0)); + } + + if let Some(max_progress) = config.play_progress_max { + query.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {}", max_progress / 100.0)); + } + + // Add limit + if let Some(max_episodes) = config.max_episodes { + query.push_str(&format!(" LIMIT {}", max_episodes)); + } + + tracing::info!("Executing partially played query with {} parameters", params.len()); + tracing::debug!("Query: {}", query); + 
+ // Execute with proper parameter binding + let mut sqlx_query = sqlx::query(&query); + for param in ¶ms { + sqlx_query = sqlx_query.bind(*param); + } + + let result = sqlx_query.execute(pool).await?; + Ok(result.rows_affected() as i32) + } + + // Execute optimized partially played query for MySQL + async fn execute_partially_played_query_mysql(&self, pool: &Pool, playlist_id: i32, user_id: i32, config: &PlaylistConfig) -> AppResult { + // Use direct INSERT without subquery for this optimized case - no alias scoping issues + let sort_order = config.get_mysql_sort_order().replace("ORDER BY ", ""); + + let query = format!(" + INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) + SELECT ?, e.EpisodeID, ROW_NUMBER() OVER (ORDER BY {}) as position + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID + WHERE h.ListenDuration > 0 + AND h.ListenDuration < e.EpisodeDuration + AND e.Completed = FALSE + AND e.EpisodeDuration > 0 + AND h.UserID = ? 
+ ", sort_order); + + // For simplicity, execute basic version - full implementation would add all filters + let result = sqlx::query(&query) + .bind(playlist_id) + .bind(user_id) + .execute(pool) + .await?; + + Ok(result.rows_affected() as i32) + } + + // Build complete PostgreSQL query with all filters - EXACT PYTHON MATCH + fn build_complete_postgres_query(&self, base_query: String, params: Vec, config: &PlaylistConfig, playlist_id: i32) -> AppResult<(String, Vec)> { + // Build proper SELECT query first - need to include columns for ordering in subquery + let mut select_columns = vec![ + "e.episodeid".to_string(), + "p.podcastid".to_string(), + "e.episodepubdate".to_string(), + "e.episodeduration".to_string(), + "COALESCE(h.listenduration, 0) as listenduration".to_string() + ]; + + let mut select_query = base_query.replace( + "SELECT e.episodeid, p.podcastid, u.timezone", + &format!("SELECT {}", select_columns.join(", ")) + ); + + let mut all_params = params; + let mut param_index = all_params.len() + 2; // +2 because playlist_id will be inserted as $1 + + // Add podcast filter (PostgreSQL IN clause support) + if let Some(ref podcast_ids) = config.podcast_ids { + println!("Playlist {}: Applying podcast filter with IDs: {:?}", playlist_id, podcast_ids); + if !podcast_ids.is_empty() { + if podcast_ids.len() == 1 { + println!("PostgreSQL single podcast filter: p.podcastid = {}", podcast_ids[0]); + select_query.push_str(&format!(" AND p.podcastid = ${}", param_index)); + all_params.push(podcast_ids[0]); + param_index += 1; + } else { + let placeholders: String = (0..podcast_ids.len()) + .map(|i| format!("${}", param_index + i)) + .collect::>() + .join(","); + println!("PostgreSQL multiple podcast filter: p.podcastid IN ({})", placeholders); + select_query.push_str(&format!(" AND p.podcastid IN ({})", placeholders)); + all_params.extend(podcast_ids); + param_index += podcast_ids.len(); + } + } else { + // If podcast_ids is Some but empty, user selected specific 
podcasts but list is empty + // This should return no results (exclude all podcasts) + println!("PostgreSQL podcast filter is empty - excluding all podcasts"); + select_query.push_str(" AND FALSE"); + } + } else { + println!("PostgreSQL no podcast filter applied - config.podcast_ids is None"); + } + + // Add duration filters + if let Some(min_duration) = config.min_duration { + select_query.push_str(&format!(" AND e.episodeduration >= ${}", param_index)); + all_params.push(min_duration); + param_index += 1; + } + + if let Some(max_duration) = config.max_duration { + select_query.push_str(&format!(" AND e.episodeduration <= ${}", param_index)); + all_params.push(max_duration); + param_index += 1; + } + + // Add time filter with timezone awareness + if let Some(time_filter_hours) = config.time_filter_hours { + select_query.push_str(&format!( + " AND e.episodepubdate AT TIME ZONE 'UTC' AT TIME ZONE COALESCE(u.timezone, 'UTC') > \ + (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' AT TIME ZONE COALESCE(u.timezone, 'UTC') - INTERVAL '{}' HOUR)", + time_filter_hours + )); + } + + // Add play state filters - EXACT PYTHON LOGIC + println!("Playlist {}: Applying play state filters - unplayed: {}, partially_played: {}, played: {}", + playlist_id, config.include_unplayed, config.include_partially_played, config.include_played); + let mut play_state_conditions = Vec::new(); + + if config.include_unplayed { + play_state_conditions.push("h.listenduration IS NULL".to_string()); + println!("Playlist {}: Added unplayed episode filter", playlist_id); + } + + if config.include_partially_played { + let mut partial_condition = "(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.completed = FALSE)".to_string(); + + if let Some(min_progress) = config.play_progress_min { + partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {}", min_progress / 100.0)); + } + + if let Some(max_progress) = config.play_progress_max { + 
partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {}", max_progress / 100.0)); + } + + play_state_conditions.push(partial_condition); + } + + if config.include_played { + play_state_conditions.push("(e.completed = TRUE OR h.listenduration >= e.episodeduration)".to_string()); + tracing::debug!("Playlist {}: Added played/completed episode filter", playlist_id); + } + + if !play_state_conditions.is_empty() { + select_query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); + tracing::debug!("Playlist {}: Applied play state filter: ({})", playlist_id, play_state_conditions.join(" OR ")); + } else { + // If no play states are selected, exclude all episodes (return no results) + select_query.push_str(" AND FALSE"); + tracing::debug!("Playlist {}: No play states selected - excluding all episodes", playlist_id); + } + + // Explicitly exclude completed episodes when include_played is false + if !config.include_played { + select_query.push_str(" AND e.completed = FALSE"); + tracing::debug!("Playlist {}: Excluding all completed episodes (include_played=false)", playlist_id); + } + + // Note: No ORDER BY in inner query - final sorting is handled by ROW_NUMBER() OVER clause + + // Add limit + if let Some(max_episodes) = config.max_episodes { + select_query.push_str(&format!(" LIMIT {}", max_episodes)); + } + + // Now wrap the SELECT query in INSERT with ROW_NUMBER() - FIXED ALIAS SCOPING + println!("Playlist {}: group_by_podcast setting: {}", playlist_id, config.group_by_podcast); + let sort_for_insert = if config.group_by_podcast { + let sort_with_grouping = format!("ORDER BY episodes.podcastid, {}", config.get_postgres_outer_sort_order().replace("ORDER BY ", "")); + println!("Playlist {}: Using grouped sort: {}", playlist_id, sort_with_grouping); + sort_with_grouping + } else { + let sort_without_grouping = config.get_postgres_outer_sort_order(); + println!("Playlist {}: Using non-grouped sort: {}", 
playlist_id, sort_without_grouping); + println!("Playlist {}: Debug - raw sort_order: '{}'", playlist_id, config.sort_order); + sort_without_grouping + }; + + let insert_query = format!(r#" + INSERT INTO "PlaylistContents" (playlistid, episodeid, position) + SELECT $1, episodes.episodeid, ROW_NUMBER() OVER ({}) as position + FROM ({}) episodes + "#, sort_for_insert, select_query); + + println!("Playlist {}: Final insert query: {}", playlist_id, insert_query); + + // Final params: playlist_id first, then all query params + let mut final_params = vec![playlist_id]; + final_params.extend(all_params); + + Ok((insert_query, final_params)) + } + + // Build complete MySQL query with all filters - EXACT PYTHON MATCH + fn build_complete_mysql_query(&self, base_query: String, params: Vec, config: &PlaylistConfig, playlist_id: i32) -> AppResult<(String, Vec)> { + // Build proper SELECT query first - need to include columns for ordering in subquery + let mut select_columns = vec![ + "e.EpisodeID".to_string(), + "p.PodcastID".to_string(), + "e.EpisodePubDate".to_string(), + "e.EpisodeDuration".to_string(), + "COALESCE(h.ListenDuration, 0) as ListenDuration".to_string() + ]; + + let mut select_query = base_query.replace( + "SELECT e.EpisodeID", + &format!("SELECT {}", select_columns.join(", ")) + ); + + let mut all_params = params; + + // Add podcast filter (MySQL JSON/IN support) + if let Some(ref podcast_ids) = config.podcast_ids { + if !podcast_ids.is_empty() { + println!("MySQL applying podcast filter with IDs: {:?}", podcast_ids); + if podcast_ids.len() == 1 { + select_query.push_str(" AND p.PodcastID = ?"); + all_params.push(podcast_ids[0]); + println!("MySQL single podcast filter: p.PodcastID = {}", podcast_ids[0]); + } else { + let placeholders: String = podcast_ids.iter().map(|_| "?").collect::>().join(","); + select_query.push_str(&format!(" AND p.PodcastID IN ({})", placeholders)); + all_params.extend(podcast_ids); + println!("MySQL multiple podcast filter: 
p.PodcastID IN ({})", placeholders); + } + } else { + // If podcast_ids is Some but empty, user selected specific podcasts but list is empty + // This should return no results (exclude all podcasts) + println!("MySQL podcast filter is empty - excluding all podcasts"); + select_query.push_str(" AND FALSE"); + } + } else { + println!("MySQL no podcast_ids specified, no filter applied"); + } + + // Add duration filters + if let Some(min_duration) = config.min_duration { + select_query.push_str(" AND e.EpisodeDuration >= ?"); + all_params.push(min_duration); + } + + if let Some(max_duration) = config.max_duration { + select_query.push_str(" AND e.EpisodeDuration <= ?"); + all_params.push(max_duration); + } + + // Add time filter with timezone awareness + if let Some(time_filter_hours) = config.time_filter_hours { + select_query.push_str(&format!( + " AND CONVERT_TZ(e.EpisodePubDate, 'UTC', COALESCE(u.TimeZone, 'UTC')) > \ + DATE_SUB(CONVERT_TZ(NOW(), 'UTC', COALESCE(u.TimeZone, 'UTC')), INTERVAL {} HOUR)", + time_filter_hours + )); + } + + // Add play state filters - EXACT PYTHON LOGIC + let mut play_state_conditions = Vec::new(); + + if config.include_unplayed { + play_state_conditions.push("h.ListenDuration IS NULL".to_string()); + } + + if config.include_partially_played { + let mut partial_condition = "(h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration AND e.Completed = FALSE)".to_string(); + + if let Some(min_progress) = config.play_progress_min { + partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) >= {}", min_progress / 100.0)); + } + + if let Some(max_progress) = config.play_progress_max { + partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) <= {}", max_progress / 100.0)); + } + + play_state_conditions.push(partial_condition); + } + + if config.include_played { + play_state_conditions.push("(e.Completed = TRUE OR h.ListenDuration >= e.EpisodeDuration)".to_string()); + 
} + + if !play_state_conditions.is_empty() { + select_query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); + } else { + // If no play states are selected, exclude all episodes (return no results) + select_query.push_str(" AND FALSE"); + } + + // Explicitly exclude completed episodes when include_played is false + if !config.include_played { + select_query.push_str(" AND e.Completed = FALSE"); + } + + // Note: No ORDER BY in inner query - final sorting is handled by ROW_NUMBER() OVER clause + + // Add limit + if let Some(max_episodes) = config.max_episodes { + select_query.push_str(&format!(" LIMIT {}", max_episodes)); + } + + // Now wrap the SELECT query in INSERT with ROW_NUMBER() - FIXED ALIAS SCOPING + println!("Playlist {}: group_by_podcast setting: {}", playlist_id, config.group_by_podcast); + let sort_for_insert = if config.group_by_podcast { + let sort_with_grouping = format!("ORDER BY episodes.PodcastID, {}", config.get_mysql_outer_sort_order().replace("ORDER BY ", "")); + println!("Playlist {}: Using grouped sort: {}", playlist_id, sort_with_grouping); + sort_with_grouping + } else { + let sort_without_grouping = config.get_mysql_outer_sort_order(); + println!("Playlist {}: Using non-grouped sort: {}", playlist_id, sort_without_grouping); + sort_without_grouping + }; + + let insert_query = format!(r#" + INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) + SELECT ?, episodes.EpisodeID, ROW_NUMBER() OVER ({}) as position + FROM ({}) episodes + "#, sort_for_insert, select_query); + + // Final params: playlist_id first, then all query params + let mut final_params = vec![playlist_id]; + final_params.extend(all_params); + + Ok((insert_query, final_params)) + } + + // Add play state filters for PostgreSQL + fn add_play_state_filters_postgres(&self, query: &mut String, params: &mut Vec, param_index: &mut usize, config: &PlaylistConfig) -> AppResult<()> { + let mut play_state_conditions = Vec::new(); + + if config.include_unplayed 
{ + play_state_conditions.push("h.listenduration IS NULL".to_string()); + } + + if config.include_partially_played { + let mut partial_condition = "(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.completed = FALSE)".to_string(); + + if let Some(min_progress) = config.play_progress_min { + partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {}", min_progress / 100.0)); + } + + if let Some(max_progress) = config.play_progress_max { + partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {}", max_progress / 100.0)); + } + + play_state_conditions.push(partial_condition); + } + + if config.include_played { + play_state_conditions.push("h.listenduration >= e.episodeduration".to_string()); + } + + if !play_state_conditions.is_empty() { + query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); + } + + Ok(()) + } + + // Add play state filters for MySQL + fn add_play_state_filters_mysql(&self, query: &mut String, config: &PlaylistConfig) -> AppResult<()> { + let mut play_state_conditions = Vec::new(); + + if config.include_unplayed { + play_state_conditions.push("h.ListenDuration IS NULL".to_string()); + } + + if config.include_partially_played { + let mut partial_condition = "(h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration AND e.Completed = FALSE)".to_string(); + + if let Some(min_progress) = config.play_progress_min { + partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) >= {}", min_progress / 100.0)); + } + + if let Some(max_progress) = config.play_progress_max { + partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) <= {}", max_progress / 100.0)); + } + + play_state_conditions.push(partial_condition); + } + + if config.include_played { + play_state_conditions.push("h.ListenDuration >= e.EpisodeDuration".to_string()); + } + + if 
!play_state_conditions.is_empty() { + query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); + } + + Ok(()) + } + + // Add common filters for PostgreSQL (simplified helper) + fn add_common_filters_postgres(&self, _query: &mut String, _params: &mut Vec + Send + 'static>>, _param_index: &mut usize, _config: &PlaylistConfig, _user_id: i32) -> AppResult<()> { + // Implementation for duration, time, podcast filters + // Simplified for now due to complexity of dynamic parameters + Ok(()) + } + + // Helper function to parse categories JSON string into HashMap - matches Python version + fn parse_categories_json(&self, categories_str: &str) -> Option> { + if categories_str.is_empty() { + return Some(std::collections::HashMap::new()); + } + + if categories_str.starts_with('{') { + // Try to parse as JSON first + if let Ok(parsed) = serde_json::from_str::>(categories_str) { + return Some(parsed); + } + } else { + // Fall back to comma-separated parsing like Python version + let mut result = std::collections::HashMap::new(); + for (i, cat) in categories_str.split(',').enumerate() { + result.insert(i.to_string(), cat.trim().to_string()); + } + return Some(result); + } + + // Return empty map if parsing fails + Some(std::collections::HashMap::new()) + } + + // Execute the final playlist query for PostgreSQL - FIXED VERSION + async fn execute_playlist_query_postgres(&self, pool: &Pool, query: &str, params: &[i32], _playlist_id: i32) -> AppResult { + println!("PostgreSQL executing playlist query with {} parameters", params.len()); + println!("PostgreSQL Query: {}", query); + println!("PostgreSQL Params: {:?}", params); + + // Build query with proper parameter binding + let mut sqlx_query = sqlx::query(query); + for param in params { + sqlx_query = sqlx_query.bind(*param); + } + + let result = sqlx_query.execute(pool).await?; + println!("PostgreSQL playlist query affected {} rows", result.rows_affected()); + Ok(result.rows_affected() as i32) + } + + // Execute 
the final playlist query for MySQL + async fn execute_playlist_query_mysql(&self, pool: &Pool, query: &str, params: &[i32], _playlist_id: i32) -> AppResult { + tracing::info!("Executing MySQL playlist query with {} parameters", params.len()); + tracing::debug!("Query: {}", query); + tracing::debug!("Params: {:?}", params); + + // Build query with proper parameter binding + let mut sqlx_query = sqlx::query(query); + for param in params { + sqlx_query = sqlx_query.bind(*param); + } + + let result = sqlx_query.execute(pool) + .await?; + + Ok(result.rows_affected() as i32) + } + + // Get podcast details - matches Python get_podcast_details function exactly + pub async fn get_podcast_details(&self, user_id: i32, podcast_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + // First try to get podcast for specific user + let mut podcast_row = sqlx::query(r#" + SELECT podcastid, podcastname, feedurl, description, author, artworkurl, + explicit, episodecount, categories, websiteurl, podcastindexid, isyoutubechannel, + userid, autodownload, startskip, endskip, username, password, notificationsenabled, feedcutoffdays, + playbackspeed, playbackspeedcustomized + FROM "Podcasts" + WHERE podcastid = $1 AND userid = $2 + "#) + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + // Fallback to admin/public podcasts (userid = 1) if not found for user + if podcast_row.is_none() { + podcast_row = sqlx::query(r#" + SELECT podcastid, podcastname, feedurl, description, author, artworkurl, + explicit, episodecount, categories, websiteurl, podcastindexid, isyoutubechannel, + userid, autodownload, startskip, endskip, username, password, notificationsenabled, feedcutoffdays, + playbackspeed, playbackspeedcustomized + FROM "Podcasts" + WHERE podcastid = $1 AND userid = 1 + "#) + .bind(podcast_id) + .fetch_optional(pool) + .await?; + } + + let row = podcast_row.ok_or_else(|| AppError::not_found("Podcast not found"))?; + + // Get episode count 
(special handling for YouTube channels) + let is_youtube: bool = row.try_get("isyoutubechannel").unwrap_or(false); + let episode_count = if is_youtube { + // Get count from YouTubeVideos table for YouTube channels + let count_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) + .bind(podcast_id) + .fetch_one(pool) + .await?; + count_row.try_get::("count")? as i32 + } else { + row.try_get("episodecount").unwrap_or(0) + }; + + // Get categories and parse to HashMap - matches Python version exactly + let categories_str = row.try_get::("categories").unwrap_or_else(|_| String::new()); + let categories = self.parse_categories_json(&categories_str); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("podcastid")?, + "podcastindexid": row.try_get::("podcastindexid").unwrap_or(0), + "podcastname": row.try_get::("podcastname").unwrap_or_else(|_| "Unknown Podcast".to_string()), + "artworkurl": row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_else(|| String::new()), + "author": row.try_get::("author").unwrap_or_else(|_| "Unknown Author".to_string()), + "categories": categories, + "description": row.try_get::("description").unwrap_or_else(|_| String::new()), + "episodecount": episode_count, + "feedurl": row.try_get::("feedurl").unwrap_or_else(|_| String::new()), + "websiteurl": row.try_get::("websiteurl").unwrap_or_else(|_| String::new()), + "explicit": row.try_get::("explicit").unwrap_or(false), + "userid": row.try_get::("userid")?, + "autodownload": row.try_get::("autodownload").unwrap_or(false), + "startskip": row.try_get::("startskip").unwrap_or(0), + "endskip": row.try_get::("endskip").unwrap_or(0), + "username": row.try_get::, _>("username")?, + "password": row.try_get::, _>("password")?, + "isyoutubechannel": is_youtube, + "notificationsenabled": row.try_get::("notificationsenabled").unwrap_or(false), + "feedcutoffdays": row.try_get::("feedcutoffdays").unwrap_or(0), + "playbackspeedcustomized": 
row.try_get::("playbackspeedcustomized").unwrap_or(false), + "playbackspeed": row.try_get::("playbackspeed").unwrap_or(1.0) + })) + } + DatabasePool::MySQL(pool) => { + // First try to get podcast for specific user + let mut podcast_row = sqlx::query(r#" + SELECT PodcastID, PodcastName, FeedURL, Description, Author, ArtworkURL, + Explicit, EpisodeCount, Categories, WebsiteURL, PodcastIndexID, IsYouTubeChannel, + UserID, AutoDownload, StartSkip, EndSkip, Username, Password, NotificationsEnabled, FeedCutoffDays, + PlaybackSpeed, PlaybackSpeedCustomized + FROM Podcasts + WHERE PodcastID = ? AND UserID = ? + "#) + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + // Fallback to admin/public podcasts (UserID = 1) if not found for user + if podcast_row.is_none() { + podcast_row = sqlx::query(r#" + SELECT PodcastID, PodcastName, FeedURL, Description, Author, ArtworkURL, + Explicit, EpisodeCount, Categories, WebsiteURL, PodcastIndexID, IsYouTubeChannel, + UserID, AutoDownload, StartSkip, EndSkip, Username, Password, NotificationsEnabled, FeedCutoffDays, + PlaybackSpeed, PlaybackSpeedCustomized + FROM Podcasts + WHERE PodcastID = ? AND UserID = 1 + "#) + .bind(podcast_id) + .fetch_optional(pool) + .await?; + } + + let row = podcast_row.ok_or_else(|| AppError::not_found("Podcast not found"))?; + + // Get episode count (special handling for YouTube channels) + let is_youtube: bool = row.try_get("IsYouTubeChannel").unwrap_or(false); + let episode_count = if is_youtube { + // Get count from YouTubeVideos table for YouTube channels + let count_row = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") + .bind(podcast_id) + .fetch_one(pool) + .await?; + count_row.try_get::("count")? 
as i32 + } else { + row.try_get("EpisodeCount").unwrap_or(0) + }; + + // Get categories and parse to HashMap - matches Python version exactly + let categories_str = row.try_get::("Categories").unwrap_or_else(|_| String::new()); + let categories = self.parse_categories_json(&categories_str); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("PodcastID")?, + "podcastindexid": row.try_get::("PodcastIndexID").unwrap_or(0), + "podcastname": row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), + "artworkurl": row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_else(|| String::new()), + "author": row.try_get::("Author").unwrap_or_else(|_| "Unknown Author".to_string()), + "categories": categories, + "description": row.try_get::("Description").unwrap_or_else(|_| String::new()), + "episodecount": episode_count, + "feedurl": row.try_get::("FeedURL").unwrap_or_else(|_| String::new()), + "websiteurl": row.try_get::("WebsiteURL").unwrap_or_else(|_| String::new()), + "explicit": row.try_get::("Explicit").unwrap_or(0) != 0, + "userid": row.try_get::("UserID")?, + "autodownload": row.try_get::("AutoDownload").unwrap_or(0) != 0, + "startskip": row.try_get::("StartSkip").unwrap_or(0), + "endskip": row.try_get::("EndSkip").unwrap_or(0), + "username": row.try_get::, _>("Username")?, + "password": row.try_get::, _>("Password")?, + "isyoutubechannel": is_youtube, + "notificationsenabled": row.try_get::("NotificationsEnabled").unwrap_or(0) != 0, + "feedcutoffdays": row.try_get::("FeedCutoffDays").unwrap_or(0), + "playbackspeedcustomized": row.try_get::("PlaybackSpeedCustomized").unwrap_or(0) != 0, + "playbackspeed": if let Ok(speed) = row.try_get::("PlaybackSpeed") { + speed.to_f64().unwrap_or(1.0) + } else { + 1.0 + } + })) + } + } + } + + // OIDC Provider Management - matches Python OIDC functions + + // Get OIDC provider by client ID - for callback processing + pub async fn get_oidc_provider_by_client_id(&self, client_id: &str) -> AppResult> { 
+ match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + SELECT providerid, providername, clientid, clientsecret, authorizationurl, + tokenurl, userinfourl, scope, buttoncolor, buttontext, buttontextcolor, + iconsvg, enabled, nameclaim, emailclaim, usernameclaim, rolesclaim, + userrole, adminrole + FROM "OIDCProviders" + WHERE clientid = $1 AND enabled = true + "#) + .bind(client_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + Ok(Some(serde_json::json!({ + "provider_id": row.try_get::("providerid")?, + "provider_name": row.try_get::("providername")?, + "client_id": row.try_get::("clientid")?, + "client_secret": row.try_get::("clientsecret")?, + "authorization_url": row.try_get::("authorizationurl")?, + "token_url": row.try_get::("tokenurl")?, + "userinfo_url": row.try_get::("userinfourl")?, + "scope": row.try_get::("scope")?, + "button_color": row.try_get::("buttoncolor")?, + "button_text": row.try_get::("buttontext")?, + "button_text_color": row.try_get::("buttontextcolor")?, + "icon_svg": row.try_get::, _>("iconsvg")?, + "enabled": row.try_get::("enabled")?, + "name_claim": row.try_get::, _>("nameclaim")?, + "email_claim": row.try_get::, _>("emailclaim")?, + "username_claim": row.try_get::, _>("usernameclaim")?, + "roles_claim": row.try_get::, _>("rolesclaim")?, + "user_role": row.try_get::, _>("userrole")?, + "admin_role": row.try_get::, _>("adminrole")? + }))) + } else { + Ok(None) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query(r#" + SELECT ProviderID, ProviderName, ClientID, ClientSecret, AuthorizationURL, + TokenURL, UserInfoURL, Scope, ButtonColor, ButtonText, ButtonTextColor, + IconSVG, Enabled, NameClaim, EmailClaim, UsernameClaim, RolesClaim, + UserRole, AdminRole + FROM OIDCProviders + WHERE ClientID = ? 
AND Enabled = true + "#) + .bind(client_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + Ok(Some(serde_json::json!({ + "provider_id": row.try_get::("ProviderID")?, + "provider_name": row.try_get::("ProviderName")?, + "client_id": row.try_get::("ClientID")?, + "client_secret": row.try_get::("ClientSecret")?, + "authorization_url": row.try_get::("AuthorizationURL")?, + "token_url": row.try_get::("TokenURL")?, + "userinfo_url": row.try_get::("UserInfoURL")?, + "scope": row.try_get::("Scope")?, + "button_color": row.try_get::("ButtonColor")?, + "button_text": row.try_get::("ButtonText")?, + "button_text_color": row.try_get::("ButtonTextColor")?, + "icon_svg": row.try_get::, _>("IconSVG")?, + "enabled": row.try_get::("Enabled")? != 0, + "name_claim": row.try_get::, _>("NameClaim")?, + "email_claim": row.try_get::, _>("EmailClaim")?, + "username_claim": row.try_get::, _>("UsernameClaim")?, + "roles_claim": row.try_get::, _>("RolesClaim")?, + "user_role": row.try_get::, _>("UserRole")?, + "admin_role": row.try_get::, _>("AdminRole")? 
+ }))) + } else { + Ok(None) + } + } + } + } + + // Get OIDC provider - matches Python get_oidc_provider function EXACTLY + pub async fn get_oidc_provider(&self, client_id: &str) -> AppResult, Option, Option, Option, Option, Option)>> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + SELECT providerid, clientid, clientsecret, tokenurl, userinfourl, nameclaim, emailclaim, usernameclaim, rolesclaim, userrole, adminrole + FROM "OIDCProviders" + WHERE clientid = $1 AND enabled = true + "#) + .bind(client_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + Ok(Some(( + row.try_get("providerid")?, + row.try_get("clientid")?, + row.try_get("clientsecret")?, + row.try_get("tokenurl")?, + row.try_get("userinfourl")?, + row.try_get("nameclaim")?, + row.try_get("emailclaim")?, + row.try_get("usernameclaim")?, + row.try_get("rolesclaim")?, + row.try_get("userrole")?, + row.try_get("adminrole")?, + ))) + } else { + Ok(None) + } + }, + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#" + SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole + FROM OIDCProviders + WHERE ClientID = ? 
AND Enabled = true + "#) + .bind(client_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + Ok(Some(( + row.try_get("ProviderID")?, + row.try_get("ClientID")?, + row.try_get("ClientSecret")?, + row.try_get("TokenURL")?, + row.try_get("UserInfoURL")?, + row.try_get("NameClaim")?, + row.try_get("EmailClaim")?, + row.try_get("UsernameClaim")?, + row.try_get("RolesClaim")?, + row.try_get("UserRole")?, + row.try_get("AdminRole")?, + ))) + } else { + Ok(None) + } + } + } + } + + // Get user by email - matches Python get_user_by_email function EXACTLY + pub async fn get_user_by_email(&self, email: &str) -> AppResult, Option, bool)>> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + SELECT userid, email, username, fullname, isadmin + FROM "Users" + WHERE email = $1 + "#) + .bind(email) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + Ok(Some(( + row.try_get("userid")?, + row.try_get("email")?, + row.try_get("username")?, + row.try_get("fullname")?, + row.try_get("isadmin")?, + ))) + } else { + Ok(None) + } + }, + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#" + SELECT UserID, Email, Username, Fullname, IsAdmin + FROM Users + WHERE Email = ? 
+ "#) + .bind(email) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + let is_admin: i32 = row.try_get("IsAdmin")?; + Ok(Some(( + row.try_get("UserID")?, + row.try_get("Email")?, + row.try_get("Username")?, + row.try_get("Fullname")?, + is_admin != 0, + ))) + } else { + Ok(None) + } + } + } + } + + // Check if username exists - matches Python check_usernames function EXACTLY + pub async fn check_usernames(&self, username: &str) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"SELECT COUNT(*) as count FROM "Users" WHERE username = $1"#) + .bind(username) + .fetch_one(pool) + .await?; + let count: i64 = result.try_get("count")?; + Ok(count > 0) + }, + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#"SELECT COUNT(*) as count FROM Users WHERE Username = ?"#) + .bind(username) + .fetch_one(pool) + .await?; + let count: i64 = result.try_get("count")?; + Ok(count > 0) + } + } + } + + // Get user API key - matches Python get_user_api_key function EXACTLY + pub async fn get_user_api_key(&self, user_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + SELECT apikey FROM "APIKeys" + WHERE userid = $1 + ORDER BY created DESC + LIMIT 1 + "#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + Ok(Some(row.try_get("apikey")?)) + } else { + Ok(None) + } + }, + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#" + SELECT APIKey FROM APIKeys + WHERE UserID = ? 
+ ORDER BY Created DESC + LIMIT 1 + "#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = result { + Ok(Some(row.try_get("APIKey")?)) + } else { + Ok(None) + } + } + } + } + + // Create OIDC user - matches Python create_oidc_user function EXACTLY + pub async fn create_oidc_user(&self, email: &str, fullname: &str, username: &str) -> AppResult { + use base64::{Engine as _, engine::general_purpose::STANDARD}; + use rand::Rng; + + // Create salt exactly like Python version + let salt_bytes: [u8; 16] = rand::rng().random(); + let salt = STANDARD.encode(salt_bytes); + let hashed_password = format!("$argon2id$v=19$m=65536,t=3,p=4${}${}_OIDC_ACCOUNT_NO_PASSWORD", + salt, "X".repeat(43)); + + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + INSERT INTO "Users" (fullname, username, email, hashed_pw, isadmin) + VALUES ($1, $2, $3, $4, false) + RETURNING userid + "#) + .bind(fullname) + .bind(username) + .bind(email) + .bind(&hashed_password) + .fetch_one(pool) + .await?; + + let user_id: i32 = result.try_get("userid")?; + + // Add default user settings + sqlx::query(r#"INSERT INTO "UserSettings" (userid, theme) VALUES ($1, $2)"#) + .bind(user_id) + .bind("Nordic") + .execute(pool) + .await?; + + // Add default user stats + sqlx::query(r#"INSERT INTO "UserStats" (userid) VALUES ($1)"#) + .bind(user_id) + .execute(pool) + .await?; + + Ok(user_id) + }, + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#" + INSERT INTO Users (Fullname, Username, Email, Hashed_PW, IsAdmin) + VALUES (?, ?, ?, ?, 0) + "#) + .bind(fullname) + .bind(username) + .bind(email) + .bind(&hashed_password) + .execute(pool) + .await?; + + let user_id = result.last_insert_id() as i32; + + // Add default user settings + sqlx::query(r#"INSERT INTO UserSettings (UserID, Theme) VALUES (?, ?)"#) + .bind(user_id) + .bind("Nordic") + .execute(pool) + .await?; + + // Add default user stats + sqlx::query(r#"INSERT INTO UserStats (UserID) VALUES (?)"#) 
+ .bind(user_id) + .execute(pool) + .await?; + + Ok(user_id) + } + } + } + + + + // Check if username exists - helper for OIDC user creation + pub async fn username_exists(&self, username: &str) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"SELECT 1 FROM "Users" WHERE username = $1 LIMIT 1"#) + .bind(username) + .fetch_optional(pool) + .await?; + Ok(result.is_some()) + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("SELECT 1 FROM Users WHERE Username = ? LIMIT 1") + .bind(username) + .fetch_optional(pool) + .await?; + Ok(result.is_some()) + } + } + } + + // Create or get API key for user - for OIDC login completion + pub async fn create_or_get_api_key(&self, user_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + // Check for existing API key + let existing_key = sqlx::query(r#"SELECT apikey FROM "APIKeys" WHERE userid = $1 LIMIT 1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = existing_key { + return Ok(row.try_get("apikey")?); + } + + // Generate new API key + let api_key = format!("pk_{}", uuid::Uuid::new_v4().simple()); + + sqlx::query(r#"INSERT INTO "APIKeys" (userid, apikey) VALUES ($1, $2)"#) + .bind(user_id) + .bind(&api_key) + .execute(pool) + .await?; + + Ok(api_key) + } + DatabasePool::MySQL(pool) => { + let existing_key = sqlx::query("SELECT APIKey FROM APIKeys WHERE UserID = ? 
LIMIT 1") + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = existing_key { + return Ok(row.try_get("APIKey")?); + } + + let api_key = format!("pk_{}", uuid::Uuid::new_v4().simple()); + + sqlx::query("INSERT INTO APIKeys (UserID, APIKey) VALUES (?, ?)") + .bind(user_id) + .bind(&api_key) + .execute(pool) + .await?; + + Ok(api_key) + } + } + } + + // Get playlist episodes - matches Python get_playlist_episodes function exactly + pub async fn get_playlist_episodes(&self, user_id: i32, playlist_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + // Get playlist info with episode count - matches Python exactly + let playlist_row = sqlx::query(r#" + SELECT + p.name, + p.description, + (SELECT COUNT(*) + FROM "PlaylistContents" pc + JOIN "Episodes" e ON pc.episodeid = e.episodeid + JOIN "Podcasts" pod ON e.podcastid = pod.podcastid + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 + WHERE pc.playlistid = p.playlistid + AND (p.issystemplaylist = FALSE OR + (p.issystemplaylist = TRUE AND + (h.episodeid IS NOT NULL OR pod.userid = $2))) + ) as episode_count, + p.iconname, + p.issystemplaylist + FROM "Playlists" p + WHERE p.playlistid = $3 AND (p.userid = $4 OR p.issystemplaylist = TRUE) + "#) + .bind(user_id) + .bind(user_id) + .bind(playlist_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if playlist_row.is_none() { + return Err(AppError::not_found("Playlist not found")); + } + + let row = playlist_row.unwrap(); + let playlist_name: String = row.try_get("name")?; + let playlist_description: String = row.try_get("description").unwrap_or_default(); + let episode_count: i64 = row.try_get("episodecount")?; + let icon_name: String = row.try_get("iconname").unwrap_or_default(); + let is_system_playlist: bool = row.try_get("issystemplaylist")?; + + let episodes_rows = if is_system_playlist { + // For system playlists, update playlist first to get current episodes, then filter to user's 
podcasts + self.update_playlist_contents(playlist_id).await?; + + // Same query as user playlists but with additional filter for user's podcasts + sqlx::query(r#" + SELECT DISTINCT + "Episodes".episodeid, + "Episodes".episodetitle, + "Episodes".episodepubdate, + "Episodes".episodedescription, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, + "Episodes".episodeurl, + "Episodes".episodeduration, + "Episodes".completed, + "Podcasts".podcastname, + "Podcasts".podcastid, + "Podcasts".isyoutubechannel as is_youtube, + "UserEpisodeHistory".listenduration, + CASE WHEN "SavedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN "EpisodeQueue".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + "PlaylistContents".dateadded + FROM "PlaylistContents" + JOIN "Episodes" ON "PlaylistContents".episodeid = "Episodes".episodeid + JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid + LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid + AND "UserEpisodeHistory".userid = $1 + LEFT JOIN "SavedEpisodes" ON "Episodes".episodeid = "SavedEpisodes".episodeid + AND "SavedEpisodes".userid = $1 + LEFT JOIN "EpisodeQueue" ON "Episodes".episodeid = "EpisodeQueue".episodeid + AND "EpisodeQueue".userid = $1 + AND "EpisodeQueue".is_youtube = FALSE + LEFT JOIN "DownloadedEpisodes" ON "Episodes".episodeid = "DownloadedEpisodes".episodeid + AND "DownloadedEpisodes".userid = $1 + WHERE "PlaylistContents".playlistid = $2 + AND "Podcasts".userid = $1 + ORDER BY "PlaylistContents".dateadded DESC + "#) + .bind(user_id) + .bind(playlist_id) + .fetch_all(pool) + .await? 
+ } else { + // For user playlists, use existing PlaylistContents logic + sqlx::query(r#" + SELECT DISTINCT + "Episodes".episodeid, + "Episodes".episodetitle, + "Episodes".episodepubdate, + "Episodes".episodedescription, + CASE + WHEN "Podcasts".usepodcastcoverscustomized = TRUE AND "Podcasts".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + WHEN "Users".usepodcastcovers = TRUE THEN "Podcasts".artworkurl + ELSE "Episodes".episodeartwork + END as episodeartwork, + "Episodes".episodeurl, + "Episodes".episodeduration, + "Episodes".completed, + "Podcasts".podcastname, + "Podcasts".podcastid, + "Podcasts".isyoutubechannel as is_youtube, + "UserEpisodeHistory".listenduration, + CASE WHEN "SavedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS saved, + CASE WHEN "EpisodeQueue".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS queued, + CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, + "PlaylistContents".dateadded + FROM "PlaylistContents" + JOIN "Episodes" ON "PlaylistContents".episodeid = "Episodes".episodeid + JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + LEFT JOIN "Users" ON "Podcasts".userid = "Users".userid + LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid + AND "UserEpisodeHistory".userid = $1 + LEFT JOIN "SavedEpisodes" ON "Episodes".episodeid = "SavedEpisodes".episodeid + AND "SavedEpisodes".userid = $1 + LEFT JOIN "EpisodeQueue" ON "Episodes".episodeid = "EpisodeQueue".episodeid + AND "EpisodeQueue".userid = $1 + AND "EpisodeQueue".is_youtube = FALSE + LEFT JOIN "DownloadedEpisodes" ON "Episodes".episodeid = "DownloadedEpisodes".episodeid + AND "DownloadedEpisodes".userid = $1 + WHERE "PlaylistContents".playlistid = $2 + ORDER BY "PlaylistContents".dateadded DESC + "#) + .bind(user_id) + .bind(playlist_id) + .fetch_all(pool) + .await? 
+ }; + + let mut episodes = Vec::new(); + for row in episodes_rows { + let episodeid: i32 = row.try_get("episodeid")?; + let episodetitle: String = row.try_get("episodetitle")?; + let naive = row.try_get::("episodepubdate")?; + let episodepubdate = naive.format("%Y-%m-%dT%H:%M:%S").to_string(); + let episodedescription: String = row.try_get("episodedescription")?; + let episodeartwork: String = row.try_get("episodeartwork")?; + let episodeurl: String = row.try_get("episodeurl")?; + let episodeduration: i32 = row.try_get("episodeduration")?; + let completed: bool = row.try_get("completed")?; + let podcastname: String = row.try_get("podcastname")?; + let podcastid: i32 = row.try_get("podcastid")?; + let is_youtube: bool = row.try_get("is_youtube")?; + let listenduration: Option = row.try_get("listenduration")?; + let saved: bool = row.try_get("saved")?; + let queued: bool = row.try_get("queued")?; + let downloaded: bool = row.try_get("downloaded")?; + let dateadded_naive = row.try_get::("dateadded")?; + let dateadded = dateadded_naive.format("%Y-%m-%dT%H:%M:%S").to_string(); + + episodes.push(serde_json::json!({ + "episodeid": episodeid, + "episodetitle": episodetitle, + "episodepubdate": episodepubdate, + "episodedescription": episodedescription, + "episodeartwork": episodeartwork, + "episodeurl": episodeurl, + "episodeduration": episodeduration, + "completed": completed, + "podcastname": podcastname, + "podcastid": podcastid, + "is_youtube": is_youtube, + "listenduration": listenduration, + "saved": saved, + "queued": queued, + "downloaded": downloaded, + "dateadded": dateadded + })); + } + + // Build playlist_info structure matching Python exactly + let playlist_info = serde_json::json!({ + "name": playlist_name, + "description": playlist_description, + "episode_count": episode_count, + "icon_name": icon_name + }); + + Ok(serde_json::json!({ + "playlist_info": playlist_info, + "episodes": episodes + })) + } + DatabasePool::MySQL(pool) => { + // Get playlist info 
with episode count - matches Python exactly + let playlist_row = sqlx::query( + "SELECT + p.Name, + p.Description, + (SELECT COUNT(*) + FROM PlaylistContents pc + JOIN Episodes e ON pc.EpisodeID = e.EpisodeID + JOIN Podcasts pod ON e.PodcastID = pod.PodcastID + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? + WHERE pc.PlaylistID = p.PlaylistID + AND (p.IsSystemPlaylist = 0 OR + (p.IsSystemPlaylist = 1 AND + (h.EpisodeID IS NOT NULL OR pod.UserID = ?))) + ) as episode_count, + p.IconName, + p.IsSystemPlaylist + FROM Playlists p + WHERE p.PlaylistID = ? AND (p.UserID = ? OR p.IsSystemPlaylist = 1)" + ) + .bind(user_id) + .bind(user_id) + .bind(playlist_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if playlist_row.is_none() { + return Err(AppError::not_found("Playlist not found")); + } + + let row = playlist_row.unwrap(); + let playlist_name: String = row.try_get("Name")?; + let playlist_description: String = row.try_get("Description").unwrap_or_default(); + let episode_count: i64 = row.try_get("episode_count")?; + let icon_name: String = row.try_get("IconName").unwrap_or_default(); + let is_system_playlist: bool = row.try_get::("IsSystemPlaylist")? 
!= 0; + + let episodes_rows = if is_system_playlist { + // For system playlists, update playlist first to get current episodes, then filter to user's podcasts + self.update_playlist_contents(playlist_id).await?; + + // Same query as user playlists but with additional filter for user's podcasts + sqlx::query( + "SELECT DISTINCT + e.EpisodeID as episodeid, + e.EpisodeTitle as episodetitle, + e.EpisodePubDate as episodepubdate, + e.EpisodeDescription as episodedescription, + CASE + WHEN p.UsePodcastCoversCustomized = TRUE AND p.UsePodcastCovers = TRUE THEN p.ArtworkURL + WHEN u.UsePodcastCovers = TRUE THEN p.ArtworkURL + ELSE e.EpisodeArtwork + END as episodeartwork, + e.EpisodeURL as episodeurl, + e.EpisodeDuration as episodeduration, + e.Completed as completed, + p.PodcastName as podcastname, + p.PodcastID as podcastid, + p.IsYouTubeChannel as is_youtube, + ueh.ListenDuration as listenduration, + CASE WHEN se.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved, + CASE WHEN eq.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued, + CASE WHEN de.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded, + pc.DateAdded as addeddate + FROM PlaylistContents pc + JOIN Episodes e ON pc.EpisodeID = e.EpisodeID + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN Users u ON p.UserID = u.UserID + LEFT JOIN UserEpisodeHistory ueh ON e.EpisodeID = ueh.EpisodeID AND ueh.UserID = ? + LEFT JOIN SavedEpisodes se ON e.EpisodeID = se.EpisodeID AND se.UserID = ? + LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = ? AND eq.is_youtube = 0 + LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID AND de.UserID = ? + WHERE pc.PlaylistID = ? AND p.UserID = ? + ORDER BY pc.DateAdded DESC" + ) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(playlist_id) + .bind(user_id) + .fetch_all(pool) + .await? 
+ } else { + // For user playlists, use existing PlaylistContents logic + sqlx::query( + "SELECT DISTINCT + e.EpisodeID as episodeid, + e.EpisodeTitle as episodetitle, + e.EpisodePubDate as episodepubdate, + e.EpisodeDescription as episodedescription, + CASE + WHEN p.UsePodcastCoversCustomized = TRUE AND p.UsePodcastCovers = TRUE THEN p.ArtworkURL + WHEN u.UsePodcastCovers = TRUE THEN p.ArtworkURL + ELSE e.EpisodeArtwork + END as episodeartwork, + e.EpisodeURL as episodeurl, + e.EpisodeDuration as episodeduration, + e.Completed as completed, + p.PodcastName as podcastname, + p.PodcastID as podcastid, + p.IsYouTubeChannel as is_youtube, + ueh.ListenDuration as listenduration, + CASE WHEN se.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved, + CASE WHEN eq.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued, + CASE WHEN de.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded, + pc.DateAdded as addeddate + FROM PlaylistContents pc + JOIN Episodes e ON pc.EpisodeID = e.EpisodeID + JOIN Podcasts p ON e.PodcastID = p.PodcastID + LEFT JOIN Users u ON p.UserID = u.UserID + LEFT JOIN UserEpisodeHistory ueh ON e.EpisodeID = ueh.EpisodeID AND ueh.UserID = ? + LEFT JOIN SavedEpisodes se ON e.EpisodeID = se.EpisodeID AND se.UserID = ? + LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = ? AND eq.is_youtube = 0 + LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID AND de.UserID = ? + WHERE pc.PlaylistID = ? + ORDER BY pc.DateAdded DESC" + ) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(playlist_id) + .fetch_all(pool) + .await? 
+ }; + + let mut episodes = Vec::new(); + for row in episodes_rows { + let episodeid: i32 = row.try_get("episodeid")?; + let episodetitle: String = row.try_get("episodetitle")?; + let naive = row.try_get::("episodepubdate")?; + let episodepubdate = naive.format("%Y-%m-%dT%H:%M:%S").to_string(); + let episodedescription: String = row.try_get("episodedescription")?; + let episodeartwork: String = row.try_get("episodeartwork")?; + let episodeurl: String = row.try_get("episodeurl")?; + let episodeduration: i32 = row.try_get("episodeduration")?; + let completed: bool = row.try_get::("completed")? != 0; + let podcastname: String = row.try_get("podcastname")?; + let podcastid: i32 = row.try_get("podcastid")?; + let is_youtube: bool = row.try_get::("is_youtube")? != 0; + let listenduration: Option = row.try_get("listenduration")?; + let saved: bool = row.try_get::("saved")? != 0; + let queued: bool = row.try_get::("queued")? != 0; + let downloaded: bool = row.try_get::("downloaded")? != 0; + let addeddate_dt = row.try_get::, _>("addeddate")?; + let addeddate = addeddate_dt.format("%Y-%m-%dT%H:%M:%S").to_string(); + + episodes.push(serde_json::json!({ + "episodeid": episodeid, + "episodetitle": episodetitle, + "episodepubdate": episodepubdate, + "episodedescription": episodedescription, + "episodeartwork": episodeartwork, + "episodeurl": episodeurl, + "episodeduration": episodeduration, + "completed": completed, + "podcastname": podcastname, + "podcastid": podcastid, + "is_youtube": is_youtube, + "listenduration": listenduration, + "saved": saved, + "queued": queued, + "downloaded": downloaded, + "dateadded": addeddate + })); + } + + // Build playlist_info structure matching Python exactly + let playlist_info = serde_json::json!({ + "name": playlist_name, + "description": playlist_description, + "episode_count": episode_count, + "icon_name": icon_name + }); + + Ok(serde_json::json!({ + "playlist_info": playlist_info, + "episodes": episodes + })) } } - - if 
processed_shows.is_empty() { - tracing::info!("No shows found for person: {}", person_name); - return Ok(()); - } - - // 3. Process each unique show - for (title, feed_url, feed_id) in processed_shows { - match self.process_person_show(user_id, person_id, &title, &feed_url, feed_id).await { - Ok(_) => { - tracing::info!("Successfully processed show: {}", title); - } - Err(e) => { - tracing::error!("Error processing show {}: {}", title, e); - continue; - } + } + + // Set user playback speed - matches Python set_playback_speed_user function + pub async fn set_playback_speed_user(&self, user_id: i32, playback_speed: f64) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET playbackspeed = $1 WHERE userid = $2"#) + .bind(playback_speed) + .bind(user_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET PlaybackSpeed = ? WHERE UserID = ?") + .bind(playback_speed) + .bind(user_id) + .execute(pool) + .await?; } } - Ok(()) } - // Helper function to process individual show for person - matches Python logic - async fn process_person_show(&self, user_id: i32, person_id: i32, title: &str, feed_url: &str, feed_id: i32) -> AppResult<()> { - // First check if podcast exists for user - let user_podcast_id = self.get_podcast_id_by_feed_url(user_id, feed_url).await?; - - let podcast_id = if user_podcast_id.is_none() { - // Check if system podcast exists (UserID = 1) - let system_podcast_id = self.get_podcast_id_by_feed_url(1, feed_url).await?; - - if system_podcast_id.is_none() { - // Add as new system podcast - tracing::info!("Creating system podcast for feed: {}", feed_url); - let podcast_values = self.get_podcast_values_for_person(feed_url).await?; - let add_result = self.add_person_podcast_from_values(&podcast_values, 1).await?; - tracing::info!("Add podcast result: {}", add_result); - - // Get the podcast ID after adding - tracing::info!("Looking for podcast with UserID=1 and 
FeedURL='{}'", feed_url); - match self.get_podcast_id_by_feed_url(1, feed_url).await? { - Some(id) => { - tracing::info!("Successfully created system podcast with ID: {}", id); - id - } - None => { - // Let's debug by listing all podcasts for UserID=1 - tracing::error!("Failed to get podcast ID after adding system podcast for feed: {}", feed_url); - - // Debug: List all system podcasts to see what's there - match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT podcastid, podcastname, feedurl FROM "Podcasts" WHERE userid = $1"#) - .bind(1) - .fetch_all(pool) - .await?; - - tracing::error!("System podcasts (UserID=1):"); - for row in rows { - let id: i32 = row.try_get("podcastid")?; - let name: String = row.try_get("podcastname")?; - let url: String = row.try_get("feedurl")?; - tracing::error!(" ID: {}, Name: '{}', URL: '{}'", id, name, url); - } - } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT PodcastID, PodcastName, FeedURL FROM Podcasts WHERE UserID = ?") - .bind(1) - .fetch_all(pool) - .await?; - - tracing::error!("System podcasts (UserID=1):"); - for row in rows { - let id: i32 = row.try_get("PodcastID")?; - let name: String = row.try_get("PodcastName")?; - let url: String = row.try_get("FeedURL")?; - tracing::error!(" ID: {}, Name: '{}', URL: '{}'", id, name, url); - } - } - } - - return Err(AppError::internal("Failed to create system podcast")); - } - } - } else { - system_podcast_id.unwrap() + // Set user podcast cover preference - global setting + pub async fn set_global_podcast_cover_preference(&self, user_id: i32, use_podcast_covers: bool) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET usepodcastcovers = $1 WHERE userid = $2"#) + .bind(use_podcast_covers) + .bind(user_id) + .execute(pool) + .await?; } - } else { - user_podcast_id.unwrap() - }; - - tracing::info!("Using podcast: ID={}, Title={}", podcast_id, title); - - // Add episodes to 
PeopleEpisodes - self.add_people_episodes(person_id, podcast_id, feed_url).await?; - + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET UsePodcastCovers = ? WHERE UserID = ?") + .bind(use_podcast_covers as i32) + .bind(user_id) + .execute(pool) + .await?; + } + } Ok(()) } - // Add people episodes - matches Python add_people_episodes function exactly - pub async fn add_people_episodes(&self, person_id: i32, podcast_id: i32, feed_url: &str) -> AppResult<()> { - // Validate that we have a valid podcast ID - if podcast_id <= 0 { - return Err(AppError::internal(&format!("Invalid podcast ID {} for person episodes", podcast_id))); - } - - // Use the same robust feed fetching and parsing as add_episodes - let content = self.try_fetch_feed(feed_url, None, None).await?; - let episodes = self.parse_rss_feed(&content, podcast_id, "").await?; - - println!("Parsed {} episodes from feed for person {} with podcast ID {}", episodes.len(), person_id, podcast_id); - - let mut added_count = 0; - - for episode in episodes { - // Check if episode already exists - let episode_exists = match self { - DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#" - SELECT episodeid FROM "PeopleEpisodes" - WHERE personid = $1 AND podcastid = $2 AND episodeurl = $3 - "#) - .bind(person_id) + // Set podcast cover preference - per-podcast setting + pub async fn set_podcast_cover_preference(&self, user_id: i32, podcast_id: i32, use_podcast_covers: bool) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Podcasts" SET usepodcastcovers = $1, usepodcastcoverscustomized = TRUE WHERE podcastid = $2 AND userid = $3"#) + .bind(use_podcast_covers) .bind(podcast_id) - .bind(&episode.url) - .fetch_optional(pool) + .bind(user_id) + .execute(pool) .await?; - - result.is_some() - } - DatabasePool::MySQL(pool) => { - let result = sqlx::query(" - SELECT EpisodeID FROM PeopleEpisodes - WHERE PersonID = ? AND PodcastID = ? AND EpisodeURL = ? 
- ") - .bind(person_id) + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Podcasts SET UsePodcastCovers = ?, UsePodcastCoversCustomized = 1 WHERE PodcastID = ? AND UserID = ?") + .bind(use_podcast_covers as i32) .bind(podcast_id) - .bind(&episode.url) - .fetch_optional(pool) + .bind(user_id) + .execute(pool) .await?; - - result.is_some() - } - }; - - if episode_exists { - continue; } - - // Insert new episode - match self { - DatabasePool::Postgres(pool) => { - // PostgreSQL expects timestamp type, not string - let naive_datetime = episode.pub_date.naive_utc(); - sqlx::query(r#" - INSERT INTO "PeopleEpisodes" - (personid, podcastid, episodetitle, episodedescription, - episodeurl, episodeartwork, episodepubdate, episodeduration) - VALUES ($1, $2, $3, $4, $5, $6, $7, $8) - "#) - .bind(person_id) + } + Ok(()) + } + + // Clear podcast cover preference - reset to use global setting + pub async fn clear_podcast_cover_preference(&self, user_id: i32, podcast_id: i32) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Podcasts" SET usepodcastcovers = FALSE, usepodcastcoverscustomized = FALSE WHERE podcastid = $1 AND userid = $2"#) .bind(podcast_id) - .bind(&episode.title) - .bind(&episode.description) - .bind(&episode.url) - .bind(&episode.artwork_url) - .bind(naive_datetime) - .bind(episode.duration as i32) + .bind(user_id) .execute(pool) .await?; - } - DatabasePool::MySQL(pool) => { - // MySQL accepts string format - let pub_date_str = episode.pub_date.format("%Y-%m-%d %H:%M:%S").to_string(); - sqlx::query(" - INSERT INTO PeopleEpisodes - (PersonID, PodcastID, EpisodeTitle, EpisodeDescription, - EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) - VALUES (?, ?, ?, ?, ?, ?, ?, ?) - ") - .bind(person_id) + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Podcasts SET UsePodcastCovers = 0, UsePodcastCoversCustomized = 0 WHERE PodcastID = ? 
AND UserID = ?") .bind(podcast_id) - .bind(&episode.title) - .bind(&episode.description) - .bind(&episode.url) - .bind(&episode.artwork_url) - .bind(&pub_date_str) - .bind(episode.duration as i32) + .bind(user_id) .execute(pool) .await?; - } } - - added_count += 1; } - - println!("Successfully added {} new episodes for person {} from podcast {}", added_count, person_id, podcast_id); Ok(()) } - // Helper function to parse duration from string - fn parse_duration(&self, duration_str: &str) -> Option { - if duration_str.contains(':') { - let parts: Vec<&str> = duration_str.split(':').collect(); - match parts.len() { - 2 => { - let minutes: i64 = parts[0].parse().ok()?; - let seconds: i64 = parts[1].parse().ok()?; - Some(minutes * 60 + seconds) - } - 3 => { - let hours: i64 = parts[0].parse().ok()?; - let minutes: i64 = parts[1].parse().ok()?; - let seconds: i64 = parts[2].parse().ok()?; - Some(hours * 3600 + minutes * 60 + seconds) - } - _ => None, + // Get global podcast cover preference - for settings page + pub async fn get_global_podcast_cover_preference(&self, user_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query_scalar::<_, Option>(r#"SELECT usepodcastcovers FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_one(pool) + .await?; + Ok(result.unwrap_or(false)) + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query_scalar::<_, Option>("SELECT UsePodcastCovers FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_one(pool) + .await?; + Ok(result.unwrap_or(0) != 0) } - } else if let Ok(duration) = duration_str.parse::() { - Some(duration) - } else { - None } } - // Add person podcast from values map - matches Python add_person_podcast function exactly - pub async fn add_person_podcast_from_values(&self, podcast_values: &std::collections::HashMap, user_id: i32) -> AppResult { - // Use the same key mapping as add_podcast_from_values - let pod_title = 
podcast_values.get("podcastname").cloned().unwrap_or_default(); - let pod_feed_url = podcast_values.get("feedurl").cloned().unwrap_or_default(); - let pod_artwork = podcast_values.get("artworkurl").cloned().unwrap_or_default(); - let pod_description = podcast_values.get("description").cloned().unwrap_or_default(); - // First check if podcast already exists for user with a valid feed URL + // Get per-podcast cover preference - for episode layout page + pub async fn get_podcast_cover_preference(&self, user_id: i32, podcast_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let existing = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2 AND feedurl != ''"#) - .bind(&pod_feed_url) + let result = sqlx::query_scalar::<_, Option>(r#"SELECT usepodcastcovers FROM "Podcasts" WHERE podcastid = $1 AND userid = $2 AND usepodcastcoverscustomized = TRUE"#) + .bind(podcast_id) .bind(user_id) .fetch_optional(pool) .await?; - - if existing.is_some() { - return Ok(true); // Already exists - } - - // Insert new podcast - sqlx::query(r#" - INSERT INTO "Podcasts" (podcastname, feedurl, artworkurl, description, - userid, autodownload, isyoutubechannel, podcastindexid) - VALUES ($1, $2, $3, $4, $5, FALSE, FALSE, $6) - "#) - .bind(&pod_title) - .bind(&pod_feed_url) - .bind(&pod_artwork) - .bind(&pod_description) - .bind(user_id) - .bind(0) // podcast_index_id placeholder - .execute(pool) - .await?; + Ok(result.flatten()) } DatabasePool::MySQL(pool) => { - let existing = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ? AND FeedURL != ''") - .bind(&pod_feed_url) + let result = sqlx::query_scalar::<_, Option>("SELECT UsePodcastCovers FROM Podcasts WHERE PodcastID = ? AND UserID = ? 
AND UsePodcastCoversCustomized = 1") + .bind(podcast_id) .bind(user_id) .fetch_optional(pool) .await?; + Ok(result.flatten().map(|val| val != 0)) + } + } + } + + // Get all admin user IDs - matches Python add_news_feed_if_not_added logic + pub async fn get_all_admin_user_ids(&self) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#"SELECT userid FROM "Users" WHERE isadmin = TRUE"#) + .fetch_all(pool) + .await?; - if existing.is_some() { - return Ok(true); // Already exists - } + let user_ids: Vec = rows.into_iter() + .map(|row| row.try_get("userid")) + .collect::, _>>()?; - // Insert new podcast - sqlx::query(" - INSERT INTO Podcasts (PodcastName, FeedURL, ArtworkURL, Description, - UserID, AutoDownload, IsYouTubeChannel, PodcastIndexID) - VALUES (?, ?, ?, ?, ?, 0, 0, ?) - ") - .bind(&pod_title) - .bind(&pod_feed_url) - .bind(&pod_artwork) - .bind(&pod_description) - .bind(user_id) - .bind(0) // podcast_index_id placeholder - .execute(pool) - .await?; + Ok(user_ids) + } + DatabasePool::MySQL(pool) => { + let rows = sqlx::query("SELECT UserID FROM Users WHERE IsAdmin = 1") + .fetch_all(pool) + .await?; + + let user_ids: Vec = rows.into_iter() + .map(|row| row.try_get("UserID")) + .collect::, _>>()?; + + Ok(user_ids) } } - - Ok(true) } - // Get podcast ID by feed URL - pub async fn get_podcast_id_by_feed_url(&self, user_id: i32, feed_url: &str) -> AppResult> { + // Check if user already has a specific podcast feed - matches Python logic + pub async fn user_has_podcast_feed(&self, user_id: i32, feed_url: &str) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) - .bind(feed_url) + let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE userid = $1 AND feedurl = $2"#) .bind(user_id) + .bind(feed_url) .fetch_optional(pool) .await?; - Ok(result.map(|row| row.try_get("podcastid")).transpose()?) 
+ Ok(row.is_some()) } DatabasePool::MySQL(pool) => { - let result = sqlx::query("SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?") - .bind(feed_url) + let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE UserID = ? AND FeedURL = ?") .bind(user_id) + .bind(feed_url) .fetch_optional(pool) .await?; - Ok(result.map(|row| row.try_get("PodcastID")).transpose()?) + Ok(row.is_some()) } } } - // Get podcast values for person podcasts - uses existing get_podcast_values function - pub async fn get_podcast_values_for_person(&self, feed_url: &str) -> AppResult> { - self.get_podcast_values(feed_url, 1, None, None).await - } + // Add PinePods news feed to admin users - matches Python add_news_feed_if_not_added function + pub async fn add_news_feed_if_not_added(&self) -> AppResult<()> { + let admin_user_ids = self.get_all_admin_user_ids().await?; + let feed_url = "https://news.pinepods.online/feed.xml"; - // COMPLETE PLAYLIST SYSTEM IMPLEMENTATION - matches Python functionality exactly + for user_id in admin_user_ids { + // Check if this user already has the news feed + if !self.user_has_podcast_feed(user_id, feed_url).await? 
{ + // Add the PinePods news feed using existing functions - matches Python add_custom_podcast + match self.get_podcast_values(feed_url, user_id, None, None).await { + Ok(podcast_values) => { + let feed_cutoff = 30; // Default cutoff like Python + if let Err(e) = self.add_podcast_from_values(&podcast_values, user_id, feed_cutoff, None, None).await { + eprintln!("Failed to add PinePods news feed for user {}: {}", user_id, e); + // Continue with other users even if one fails + } + }, + Err(e) => { + eprintln!("Failed to get podcast values for PinePods news feed for user {}: {}", user_id, e); + // Continue with other users even if one fails + } + } + } + } - // Update Fresh Releases playlist with timezone-aware logic - matches Python update_fresh_releases_playlist - async fn update_fresh_releases_playlist_postgres(&self, pool: &Pool, playlist_id: i32) -> AppResult { - tracing::info!("Updating Fresh Releases playlist with timezone logic"); - - // Get all users with their timezones - let users = sqlx::query(r#"SELECT userid, timezone FROM "Users""#) - .fetch_all(pool) - .await?; - - let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); - let mut position = 1; + Ok(()) + } + + // Get YouTube video location - matches Python get_youtube_video_location function exactly + pub async fn get_youtube_video_location( + &self, + episode_id: i32, + user_id: i32, + ) -> AppResult> { + println!("Looking up YouTube video location for episode_id: {}, user_id: {}", episode_id, user_id); - for user in users { - let user_id: i32 = user.try_get("userid")?; - let timezone: Option = user.try_get("timezone").ok(); - let tz = timezone.as_deref().unwrap_or("UTC"); - - tracing::info!("Processing Fresh Releases for user {} with timezone {}", user_id, tz); - - // Get episodes from last 24 hours in user's timezone - let episodes = sqlx::query(r#" - SELECT e.episodeid - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - WHERE e.episodepubdate AT TIME 
ZONE 'UTC' AT TIME ZONE $1 > - (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' AT TIME ZONE $1 - INTERVAL '24 hours') - ORDER BY e.episodepubdate DESC - "#) - .bind(tz) - .fetch_all(pool) + let youtube_id = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + SELECT "YouTubeVideos".youtubevideoid + FROM "YouTubeVideos" + INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + WHERE "YouTubeVideos".videoid = $1 AND "Podcasts".userid = $2 + "#) + .bind(episode_id) + .bind(user_id) + .fetch_optional(pool) .await?; - - // Add unique episodes to playlist - for episode in episodes { - let episode_id: i32 = episode.try_get("episodeid")?; - if !added_episodes.contains(&episode_id) { - sqlx::query(r#" - INSERT INTO "PlaylistContents" (playlistid, episodeid, position) - VALUES ($1, $2, $3) - "#) - .bind(playlist_id) - .bind(episode_id) - .bind(position) - .execute(pool) - .await?; - - added_episodes.insert(episode_id); - position += 1; + + if let Some(row) = row { + row.try_get::("youtubevideoid")? + } else { + return Ok(None); + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query(r#" + SELECT YouTubeVideos.YouTubeVideoID + FROM YouTubeVideos + INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + WHERE YouTubeVideos.VideoID = ? AND Podcasts.UserID = ? + "#) + .bind(episode_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::("YouTubeVideoID")? 
+ } else { + return Ok(None); + } + } + }; + + println!("Found YouTube ID: {}", youtube_id); + + let file_path = format!("/opt/pinepods/downloads/youtube/{}.mp3", youtube_id); + let file_path_double = format!("/opt/pinepods/downloads/youtube/{}.mp3.mp3", youtube_id); + + println!("Checking paths: {} and {}", file_path, file_path_double); + + if tokio::fs::metadata(&file_path).await.is_ok() { + println!("Found file at {}", file_path); + Ok(Some(file_path)) + } else if tokio::fs::metadata(&file_path_double).await.is_ok() { + println!("Found file at {}", file_path_double); + Ok(Some(file_path_double)) + } else { + println!("No file found for YouTube ID: {}", youtube_id); + Ok(None) + } + } + + // Get download location - matches Python get_download_location function exactly + pub async fn get_download_location( + &self, + episode_id: i32, + user_id: i32, + ) -> AppResult> { + println!("Looking up download location for episode_id: {}, user_id: {}", episode_id, user_id); + + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT downloadedlocation FROM "DownloadedEpisodes" WHERE episodeid = $1 AND userid = $2"#) + .bind(episode_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let location: String = row.try_get("downloadedlocation")?; + println!("DownloadedLocation found: {}", location); + Ok(Some(location)) + } else { + println!("No DownloadedLocation found for the given EpisodeID and UserID"); + Ok(None) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT DownloadedLocation FROM DownloadedEpisodes WHERE EpisodeID = ? 
AND UserID = ?") + .bind(episode_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let location: String = row.try_get("DownloadedLocation")?; + println!("DownloadedLocation found: {}", location); + Ok(Some(location)) + } else { + println!("No DownloadedLocation found for the given EpisodeID and UserID"); + Ok(None) } } } - - tracing::info!("Fresh Releases playlist updated with {} episodes", added_episodes.len()); - Ok(added_episodes.len() as i32) } - async fn update_fresh_releases_playlist_mysql(&self, pool: &Pool, playlist_id: i32) -> AppResult { - tracing::info!("Updating Fresh Releases playlist with timezone logic"); - - // Get all users with their timezones - let users = sqlx::query("SELECT UserID, TimeZone FROM Users") - .fetch_all(pool) - .await?; - - let mut added_episodes: std::collections::HashSet = std::collections::HashSet::new(); - let mut position = 1; + // Update YouTube video duration after download - updates duration from MP3 file + pub async fn update_youtube_video_duration(&self, video_id: &str, duration_seconds: i32) -> AppResult<()> { + println!("Updating duration for YouTube video {} to {} seconds", video_id, duration_seconds); - for user in users { - let user_id: i32 = user.try_get("UserID")?; - let timezone: Option = user.try_get("TimeZone").ok(); - let tz = timezone.as_deref().unwrap_or("UTC"); - - tracing::info!("Processing Fresh Releases for user {} with timezone {}", user_id, tz); - - // Get episodes from last 24 hours in user's timezone - let episodes = sqlx::query(" - SELECT e.EpisodeID - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - WHERE CONVERT_TZ(e.EpisodePubDate, 'UTC', ?) 
> - DATE_SUB(CONVERT_TZ(NOW(), 'UTC', ?), INTERVAL 24 HOUR) - ORDER BY e.EpisodePubDate DESC - ") - .bind(tz) - .bind(tz) - .fetch_all(pool) - .await?; - - // Add unique episodes to playlist - for episode in episodes { - let episode_id: i32 = episode.try_get("EpisodeID")?; - if !added_episodes.contains(&episode_id) { - sqlx::query(" - INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) - VALUES (?, ?, ?) - ") - .bind(playlist_id) - .bind(episode_id) - .bind(position) - .execute(pool) - .await?; - - added_episodes.insert(episode_id); - position += 1; - } + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "YouTubeVideos" SET duration = $1 WHERE youtubevideoid = $2"#) + .bind(duration_seconds) + .bind(video_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE YouTubeVideos SET Duration = ? WHERE YouTubeVideoID = ?") + .bind(duration_seconds) + .bind(video_id) + .execute(pool) + .await?; } } - tracing::info!("Fresh Releases playlist updated with {} episodes", added_episodes.len()); - Ok(added_episodes.len() as i32) + println!("Successfully updated duration for YouTube video {}", video_id); + Ok(()) } +} - // Build and execute playlist query for PostgreSQL - matches Python build_playlist_query exactly - async fn build_and_execute_playlist_query_postgres(&self, pool: &Pool, playlist: &sqlx::postgres::PgRow) -> AppResult { - let playlist_id: i32 = playlist.try_get("playlistid")?; - let user_id: i32 = playlist.try_get("userid")?; - let playlist_name: String = playlist.try_get("name")?; - let is_system_playlist: bool = playlist.try_get("issystemplaylist").unwrap_or(false); - - // Parse playlist configuration - let config = PlaylistConfig::from_postgres_row(playlist)?; - - // Determine if this playlist needs user history filtering - let needs_user_history = playlist_name == "Currently Listening" || - playlist_name == "Almost Done" || - !is_system_playlist; - - // Check for special optimized queries for 
partially played - if config.include_partially_played && !config.include_unplayed && !config.include_played { - return self.execute_partially_played_query_postgres(pool, playlist_id, user_id, &config).await; - } +#[derive(Debug)] +pub struct PodcastValues { + pub pod_title: String, + pub pod_description: String, + pub pod_artwork: String, + pub pod_feed_url: String, + pub user_id: i32, +} + +// Playlist configuration struct - matches Python playlist data structure exactly +#[derive(Debug, Clone)] +pub struct PlaylistConfig { + pub playlist_id: i32, + pub name: String, + pub user_id: i32, + pub podcast_ids: Option>, + pub include_unplayed: bool, + pub include_partially_played: bool, + pub include_played: bool, + pub play_progress_min: Option, + pub play_progress_max: Option, + pub time_filter_hours: Option, + pub min_duration: Option, + pub max_duration: Option, + pub sort_order: String, + pub group_by_podcast: bool, + pub max_episodes: Option, + pub is_system_playlist: bool, +} + +impl PlaylistConfig { + // Create from PostgreSQL row - matches Python playlist dictionary extraction + pub fn from_postgres_row(row: &sqlx::postgres::PgRow) -> AppResult { + use sqlx::Row; - // Build the appropriate base query - FIXED PARAMETER INDEXING - let (base_query, params) = if is_system_playlist { - if needs_user_history { - // System playlist with user history filtering - (r#" - SELECT e.episodeid, p.podcastid, u.timezone - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 - JOIN "Users" u ON u.userid = $2 - WHERE 1=1 - "#.to_string(), vec![user_id, user_id]) - } else { - // System playlist without user history filtering (Fresh Releases) - (r#" - SELECT e.episodeid, p.podcastid, u.timezone - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid - JOIN "Users" u ON u.userid = $1 - WHERE 1=1 - 
"#.to_string(), vec![user_id]) + // Parse podcast IDs from PostgreSQL int4 array - can be NULL or {29,57} format + let podcast_ids = match row.try_get::>, _>("podcastids") { + Ok(Some(ids)) => { + println!("PostgreSQL got podcastids array: {:?}", ids); + if ids.is_empty() { + None + } else { + Some(ids) + } + } + Ok(None) => { + println!("PostgreSQL podcastids is NULL"); + None + } + Err(_) => { + println!("PostgreSQL failed to get podcastids as array"); + None } - } else { - // User-specific playlist - (r#" - SELECT e.episodeid, p.podcastid, u.timezone - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 - JOIN "Users" u ON u.userid = $2 - WHERE p.userid = $3 - "#.to_string(), vec![user_id, user_id, user_id]) }; - // Build the complete query with all filters - let (complete_query, all_params) = self.build_complete_postgres_query( - base_query, params, &config, playlist_id - )?; - - // Execute the query and insert episodes - self.execute_playlist_query_postgres(pool, &complete_query, &all_params, playlist_id).await + Ok(PlaylistConfig { + playlist_id: row.try_get("playlistid")?, + name: row.try_get("name")?, + user_id: row.try_get("userid")?, + podcast_ids, + include_unplayed: row.try_get("includeunplayed").unwrap_or(true), + include_partially_played: row.try_get("includepartiallyplayed").unwrap_or(true), + include_played: row.try_get("includeplayed").unwrap_or(false), + play_progress_min: row.try_get("playprogressmin").ok(), + play_progress_max: row.try_get("playprogressmax").ok(), + time_filter_hours: row.try_get("timefilterhours").ok(), + min_duration: row.try_get("minduration").ok(), + max_duration: row.try_get("maxduration").ok(), + sort_order: row.try_get("sortorder").unwrap_or_else(|_| "date_desc".to_string()), + group_by_podcast: row.try_get("groupbypodcast").unwrap_or(false), + max_episodes: row.try_get("maxepisodes").ok(), + is_system_playlist: 
row.try_get("issystemplaylist").unwrap_or(false), + }) } - - // Build and execute playlist query for MySQL - matches Python build_playlist_query exactly - async fn build_and_execute_playlist_query_mysql(&self, pool: &Pool, playlist: &sqlx::mysql::MySqlRow) -> AppResult { - let playlist_id: i32 = playlist.try_get("PlaylistID")?; - let user_id: i32 = playlist.try_get("UserID")?; - let playlist_name: String = playlist.try_get("PlaylistName")?; - let is_system_playlist: bool = playlist.try_get("IsSystemPlaylist").unwrap_or(false); - - // Parse playlist configuration - let config = PlaylistConfig::from_mysql_row(playlist)?; - - // Determine if this playlist needs user history filtering - let needs_user_history = playlist_name == "Currently Listening" || - playlist_name == "Almost Done" || - !is_system_playlist; - - // Check for special optimized queries for partially played - if config.include_partially_played && !config.include_unplayed && !config.include_played { - return self.execute_partially_played_query_mysql(pool, playlist_id, user_id, &config).await; - } + + // Create from MySQL row - matches Python playlist dictionary extraction + pub fn from_mysql_row(row: &sqlx::mysql::MySqlRow) -> AppResult { + use sqlx::Row; - // Build the appropriate base query - let (base_query, mut params) = if is_system_playlist { - if needs_user_history { - // System playlist with user history filtering - (" - SELECT e.EpisodeID - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? - JOIN Users u ON u.UserID = ? - WHERE 1=1 - ".to_string(), vec![user_id, user_id]) - } else { - // System playlist without user history filtering (Fresh Releases) - (" - SELECT e.EpisodeID - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID - JOIN Users u ON u.UserID = ? 
- WHERE 1=1 - ".to_string(), vec![user_id]) + // Parse podcast IDs from MySQL JSON (stored as BLOB) + let podcast_ids = match row.try_get::>, _>("PodcastIDs") { + Ok(Some(ids_bytes)) => { + let ids_str = String::from_utf8_lossy(&ids_bytes); + println!("Got PodcastIDs from BLOB: '{}'", ids_str); + if ids_str.is_empty() || ids_str == "null" || ids_str == "[]" { + None + } else { + let parsed: Vec = serde_json::from_str(&ids_str).unwrap_or_default(); + if parsed.is_empty() { + None + } else { + Some(parsed) + } + } + } + Ok(None) => { + println!("PodcastIDs is NULL"); + None + } + Err(_) => { + // Fallback to try as String for older records + match row.try_get::, _>("PodcastIDs") { + Ok(Some(ids_str)) => { + println!("Got PodcastIDs as String: '{}'", ids_str); + if ids_str.is_empty() || ids_str == "null" || ids_str == "[]" { + None + } else { + let parsed: Vec = serde_json::from_str(&ids_str).unwrap_or_default(); + if parsed.is_empty() { None } else { Some(parsed) } + } + } + _ => None + } } - } else { - // User-specific playlist - (" - SELECT e.EpisodeID - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? - JOIN Users u ON u.UserID = ? - WHERE p.UserID = ? 
- ".to_string(), vec![user_id, user_id, user_id]) }; - // Build the complete query with all filters - let (complete_query, all_params) = self.build_complete_mysql_query( - base_query, params, &config, playlist_id - )?; - - // Execute the query and insert episodes - self.execute_playlist_query_mysql(pool, &complete_query, &all_params, playlist_id).await + Ok(PlaylistConfig { + playlist_id: row.try_get("PlaylistID")?, + name: row.try_get("Name")?, + user_id: row.try_get("UserID")?, + podcast_ids, + include_unplayed: row.try_get("IncludeUnplayed").unwrap_or(true), + include_partially_played: row.try_get("IncludePartiallyPlayed").unwrap_or(true), + include_played: row.try_get("IncludePlayed").unwrap_or(false), + play_progress_min: row.try_get("PlayProgressMin").ok(), + play_progress_max: row.try_get("PlayProgressMax").ok(), + time_filter_hours: row.try_get("TimeFilterHours").ok(), + min_duration: row.try_get("MinDuration").ok(), + max_duration: row.try_get("MaxDuration").ok(), + sort_order: row.try_get("SortOrder").unwrap_or_else(|_| "date_desc".to_string()), + group_by_podcast: row.try_get("GroupByPodcast").unwrap_or(false), + max_episodes: row.try_get("MaxEpisodes").ok(), + is_system_playlist: row.try_get("IsSystemPlaylist").unwrap_or(false), + }) } - - // Execute optimized partially played query for PostgreSQL - FIXED VERSION - async fn execute_partially_played_query_postgres(&self, pool: &Pool, playlist_id: i32, user_id: i32, config: &PlaylistConfig) -> AppResult { - // Use direct INSERT without subquery for this optimized case - no alias scoping issues - let sort_order = config.get_postgres_sort_order().replace("ORDER BY ", ""); - - // Build base query - let mut query = format!(r#" - INSERT INTO "PlaylistContents" (playlistid, episodeid, position) - SELECT $1, e.episodeid, ROW_NUMBER() OVER (ORDER BY {}) as position - FROM "Episodes" e - JOIN "Podcasts" p ON e.podcastid = p.podcastid - JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid - WHERE 
h.listenduration > 0 - AND h.listenduration < e.episodeduration - AND e.completed = FALSE - AND e.episodeduration > 0 - AND h.userid = $2 - "#, sort_order); - - let mut params = vec![playlist_id, user_id]; - let mut param_index = 3; - - // Add progress filters - if let Some(min_progress) = config.play_progress_min { - query.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= ${}", param_index)); - params.push((min_progress / 100.0) as i32); // Simplified - should be f64 but keeping i32 for now - param_index += 1; + + // Get PostgreSQL sort order - matches Python sort_mapping exactly (for inner query) + pub fn get_postgres_sort_order(&self) -> String { + match self.sort_order.as_str() { + "date_asc" => "ORDER BY e.episodepubdate ASC".to_string(), + "date_desc" => "ORDER BY e.episodepubdate DESC".to_string(), + "duration_asc" => "ORDER BY e.episodeduration ASC".to_string(), + "duration_desc" => "ORDER BY e.episodeduration DESC".to_string(), + "listen_progress" => "ORDER BY (COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC".to_string(), + "completion" => "ORDER BY COALESCE(h.listenduration::float / NULLIF(e.episodeduration, 0), 0) DESC".to_string(), + _ => "ORDER BY e.episodepubdate DESC".to_string(), } - - if let Some(max_progress) = config.play_progress_max { - query.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= ${}", param_index)); - params.push((max_progress / 100.0) as i32); // Simplified - should be f64 but keeping i32 for now - param_index += 1; + } + + // Get PostgreSQL sort order for outer query (ROW_NUMBER OVER) - fixed alias scoping + pub fn get_postgres_outer_sort_order(&self) -> String { + match self.sort_order.as_str() { + "date_asc" => "ORDER BY episodes.episodepubdate ASC".to_string(), + "date_desc" => "ORDER BY episodes.episodepubdate DESC".to_string(), + "duration_asc" => "ORDER BY episodes.episodeduration ASC".to_string(), + "duration_desc" => "ORDER BY 
episodes.episodeduration DESC".to_string(), + "listen_progress" => "ORDER BY (episodes.listenduration::float / NULLIF(episodes.episodeduration, 0)) DESC".to_string(), + "completion" => "ORDER BY (episodes.listenduration::float / NULLIF(episodes.episodeduration, 0)) DESC".to_string(), + _ => "ORDER BY episodes.episodepubdate DESC".to_string(), } - - // Add limit - if let Some(max_episodes) = config.max_episodes { - query.push_str(&format!(" LIMIT {}", max_episodes)); + } + + // Get MySQL sort order - matches Python sort_mapping exactly (for inner query) + pub fn get_mysql_sort_order(&self) -> String { + match self.sort_order.as_str() { + "date_asc" => "ORDER BY e.EpisodePubDate ASC".to_string(), + "date_desc" => "ORDER BY e.EpisodePubDate DESC".to_string(), + "duration_asc" => "ORDER BY e.EpisodeDuration ASC".to_string(), + "duration_desc" => "ORDER BY e.EpisodeDuration DESC".to_string(), + "listen_progress" => "ORDER BY (COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) DESC".to_string(), + "completion" => "ORDER BY COALESCE(h.ListenDuration / NULLIF(e.EpisodeDuration, 0), 0) DESC".to_string(), + _ => "ORDER BY e.EpisodePubDate DESC".to_string(), + } + } + + // Get MySQL sort order for outer query (ROW_NUMBER OVER) - fixed alias scoping + pub fn get_mysql_outer_sort_order(&self) -> String { + match self.sort_order.as_str() { + "date_asc" => "ORDER BY episodes.EpisodePubDate ASC".to_string(), + "date_desc" => "ORDER BY episodes.EpisodePubDate DESC".to_string(), + "duration_asc" => "ORDER BY episodes.EpisodeDuration ASC".to_string(), + "duration_desc" => "ORDER BY episodes.EpisodeDuration DESC".to_string(), + "listen_progress" => "ORDER BY (episodes.ListenDuration / NULLIF(episodes.EpisodeDuration, 0)) DESC".to_string(), + "completion" => "ORDER BY (episodes.ListenDuration / NULLIF(episodes.EpisodeDuration, 0)) DESC".to_string(), + _ => "ORDER BY episodes.EpisodePubDate DESC".to_string(), + } + } + + // Check if this is an "Almost Done" playlist - matches 
Python logic + pub fn is_almost_done(&self) -> bool { + self.name == "Almost Done" || + (self.include_partially_played && + !self.include_unplayed && + !self.include_played && + self.play_progress_min.map_or(false, |min| min >= 75.0)) + } + + // Check if this is a "Currently Listening" playlist - matches Python logic + pub fn is_currently_listening(&self) -> bool { + self.name == "Currently Listening" || + (self.include_partially_played && + !self.include_unplayed && + !self.include_played && + self.play_progress_min.is_none() && + self.play_progress_max.is_none()) + } + + // Check if this is a "Fresh Releases" playlist - matches Python logic + pub fn is_fresh_releases(&self) -> bool { + self.name == "Fresh Releases" && self.is_system_playlist + } + + // Get effective time filter hours - Fresh Releases defaults to 24 if not set + pub fn get_effective_time_filter_hours(&self) -> Option { + if self.is_fresh_releases() && self.time_filter_hours.is_none() { + Some(24) // Default 24 hours for Fresh Releases + } else { + self.time_filter_hours } + } +} + +impl DatabasePool { + // RSS key validation - matches Python get_rss_key_if_valid function + pub async fn get_rss_key_if_valid(&self, api_key: &str, podcast_ids: Option<&Vec>) -> AppResult> { + use crate::handlers::feed::RssKeyInfo; - tracing::info!("Executing partially played query with {} parameters", params.len()); - tracing::debug!("Query: {}", query); + let filter_podcast_ids = podcast_ids.map_or(false, |ids| !ids.is_empty() && !ids.contains(&-1)); - // Execute with proper parameter binding - let mut sqlx_query = sqlx::query(&query); - for param in ¶ms { - sqlx_query = sqlx_query.bind(*param); + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + SELECT fk.userid, STRING_AGG(CAST(fkm.podcastid AS TEXT), ',') as podcastids + FROM "RssKeys" fk + LEFT JOIN "RssKeyMap" fkm ON fk.rsskeyid = fkm.rsskeyid + WHERE fk.rsskey = $1 + GROUP BY fk.userid + "#) + .bind(api_key) + .fetch_optional(pool) 
+ .await?; + + if let Some(row) = row { + let user_id: i32 = row.try_get("userid")?; + let podcast_ids_str: Option = row.try_get("podcastids").ok(); + + let key_podcast_ids = if let Some(ids_str) = podcast_ids_str { + ids_str.split(',') + .filter_map(|s| s.parse::().ok()) + .collect::>() + } else { + vec![-1] // Universal access if no specific podcasts + }; + + // Check if access is allowed + if filter_podcast_ids { + if let Some(requested_ids) = podcast_ids { + let has_universal = key_podcast_ids.contains(&-1); + let has_specific_access = requested_ids.iter() + .all(|id| key_podcast_ids.contains(id)); + + if !has_universal && !has_specific_access { + return Ok(None); + } + } + } + + Ok(Some(RssKeyInfo { + podcast_ids: key_podcast_ids, + user_id, + key: api_key.to_string(), + })) + } else { + Ok(None) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query(r#" + SELECT fk.UserID, GROUP_CONCAT(fkm.PodcastID) as podcastids + FROM RssKeys fk + LEFT JOIN RssKeyMap fkm ON fk.RssKeyID = fkm.RssKeyID + WHERE fk.RssKey = ? 
+ GROUP BY fk.UserID + "#) + .bind(api_key) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let user_id: i32 = row.try_get("UserID")?; + let podcast_ids_str: Option = row.try_get("podcastids").ok(); + + let key_podcast_ids = if let Some(ids_str) = podcast_ids_str { + ids_str.split(',') + .filter_map(|s| s.parse::().ok()) + .collect::>() + } else { + vec![-1] // Universal access if no specific podcasts + }; + + // Check if access is allowed + if filter_podcast_ids { + if let Some(requested_ids) = podcast_ids { + let has_universal = key_podcast_ids.contains(&-1); + let has_specific_access = requested_ids.iter() + .all(|id| key_podcast_ids.contains(id)); + + if !has_universal && !has_specific_access { + return Ok(None); + } + } + } + + Ok(Some(RssKeyInfo { + podcast_ids: key_podcast_ids, + user_id, + key: api_key.to_string(), + })) + } else { + Ok(None) + } + } } - - let result = sqlx_query.execute(pool).await?; - Ok(result.rows_affected() as i32) } - // Execute optimized partially played query for MySQL - async fn execute_partially_played_query_mysql(&self, pool: &Pool, playlist_id: i32, user_id: i32, config: &PlaylistConfig) -> AppResult { - // Use direct INSERT without subquery for this optimized case - no alias scoping issues - let sort_order = config.get_mysql_sort_order().replace("ORDER BY ", ""); - - let mut query = format!(" - INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) - SELECT ?, e.EpisodeID, ROW_NUMBER() OVER (ORDER BY {}) as position - FROM Episodes e - JOIN Podcasts p ON e.PodcastID = p.PodcastID - JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID - WHERE h.ListenDuration > 0 - AND h.ListenDuration < e.EpisodeDuration - AND e.Completed = FALSE - AND e.EpisodeDuration > 0 - AND h.UserID = ? 
- ", sort_order); - - // For simplicity, execute basic version - full implementation would add all filters - let result = sqlx::query(&query) - .bind(playlist_id) - .bind(user_id) - .execute(pool) - .await?; - - Ok(result.rows_affected() as i32) + // Get RSS feed status - matches Python get_rss_feed_status function + pub async fn get_rss_feed_status(&self, user_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT enablerssfeeds FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let enabled: Option = row.try_get("enablerssfeeds").ok(); + Ok(enabled.unwrap_or(false)) + } else { + Ok(false) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT EnableRSSFeeds FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let enabled: Option = row.try_get("EnableRSSFeeds").ok(); + Ok(enabled.unwrap_or(false)) + } else { + Ok(false) + } + } + } } - // Build complete PostgreSQL query with all filters - EXACT PYTHON MATCH - fn build_complete_postgres_query(&self, base_query: String, params: Vec, config: &PlaylistConfig, playlist_id: i32) -> AppResult<(String, Vec)> { - // Build proper SELECT query first - need to include columns for ordering in subquery - let mut select_columns = vec![ - "e.episodeid".to_string(), - "p.podcastid".to_string(), - "e.episodepubdate".to_string(), - "e.episodeduration".to_string(), - "COALESCE(h.listenduration, 0) as listenduration".to_string() - ]; - - let mut select_query = base_query.replace( - "SELECT e.episodeid, p.podcastid, u.timezone", - &format!("SELECT {}", select_columns.join(", ")) - ); + // Generate podcast RSS feed - matches Python generate_podcast_rss function + pub async fn generate_podcast_rss( + &self, + rss_key: crate::handlers::feed::RssKeyInfo, + limit: i32, + source_type: Option<&str>, + domain: &str, + podcast_ids: Option<&Vec>, + ) 
-> AppResult { - let mut all_params = params; - let mut param_index = all_params.len() + 1; + let user_id = rss_key.user_id; + let mut effective_podcast_ids = rss_key.podcast_ids.clone(); - // Add podcast filter (PostgreSQL array support) - if let Some(ref podcast_ids) = config.podcast_ids { - if !podcast_ids.is_empty() { - select_query.push_str(&format!(" AND p.podcastid = ANY(${})", param_index)); - // For now, we'll handle the first podcast ID only due to sqlx limitations - all_params.push(podcast_ids[0]); - param_index += 1; + // If podcast_id parameter is provided, use it; otherwise use RSS key podcast_ids + let explicit_podcast_filter = podcast_ids.is_some(); + if let Some(ids) = podcast_ids { + if !ids.is_empty() { + effective_podcast_ids = ids.clone(); } } - // Add duration filters - if let Some(min_duration) = config.min_duration { - select_query.push_str(&format!(" AND e.episodeduration >= ${}", param_index)); - all_params.push(min_duration); - param_index += 1; - } - - if let Some(max_duration) = config.max_duration { - select_query.push_str(&format!(" AND e.episodeduration <= ${}", param_index)); - all_params.push(max_duration); - param_index += 1; - } - - // Add time filter with timezone awareness - if let Some(time_filter_hours) = config.time_filter_hours { - select_query.push_str(&format!( - " AND e.episodepubdate AT TIME ZONE 'UTC' AT TIME ZONE COALESCE(u.timezone, 'UTC') > \ - (CURRENT_TIMESTAMP AT TIME ZONE 'UTC' AT TIME ZONE COALESCE(u.timezone, 'UTC') - INTERVAL '{}' HOUR)", - time_filter_hours - )); - } - - // Add play state filters - EXACT PYTHON LOGIC - let mut play_state_conditions = Vec::new(); + // Only use podcast filter if explicitly requested via URL parameter + // RSS key podcast_ids should not affect "All Podcasts" feed behavior + let podcast_filter = explicit_podcast_filter; - if config.include_unplayed { - play_state_conditions.push("h.listenduration IS NULL".to_string()); + // Check if RSS feeds are enabled for user + if 
!self.get_rss_feed_status(user_id).await? { + return Err(AppError::forbidden("RSS feeds not enabled for this user")); } - if config.include_partially_played { - let mut partial_condition = "(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.completed = FALSE)".to_string(); - - if let Some(min_progress) = config.play_progress_min { - partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {}", min_progress / 100.0)); + // Get user info for feed metadata + let username = match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT username FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::("username").unwrap_or_else(|_| "Unknown User".to_string()) + } else { + return Err(AppError::not_found("User not found")); + } } - - if let Some(max_progress) = config.play_progress_max { - partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {}", max_progress / 100.0)); + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT Username FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + row.try_get::("Username").unwrap_or_else(|_| "Unknown User".to_string()) + } else { + return Err(AppError::not_found("User not found")); + } } - - play_state_conditions.push(partial_condition); - } - - if config.include_played { - play_state_conditions.push("h.listenduration >= e.episodeduration".to_string()); - } - - if !play_state_conditions.is_empty() { - select_query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); - } - - // Add sorting and grouping - EXACT PYTHON LOGIC - let sort_order = config.get_postgres_sort_order(); - if config.group_by_podcast { - select_query.push_str(&format!(" ORDER BY p.podcastid, {}", sort_order.replace("ORDER BY ", ""))); - } else { - select_query.push_str(&format!(" 
{}", sort_order)); - } - - // Add limit - if let Some(max_episodes) = config.max_episodes { - select_query.push_str(&format!(" LIMIT {}", max_episodes)); - } - - // Now wrap the SELECT query in INSERT with ROW_NUMBER() - FIXED ALIAS SCOPING - let sort_for_insert = if config.group_by_podcast { - format!("ORDER BY episodes.podcastid, {}", config.get_postgres_outer_sort_order().replace("ORDER BY ", "")) - } else { - config.get_postgres_outer_sort_order() }; - let insert_query = format!(r#" - INSERT INTO "PlaylistContents" (playlistid, episodeid, position) - SELECT $1, episodes.episodeid, ROW_NUMBER() OVER ({}) as position - FROM ({}) episodes - "#, sort_for_insert, select_query); - - // Final params: playlist_id first, then all query params - let mut final_params = vec![playlist_id]; - final_params.extend(all_params); - - Ok((insert_query, final_params)) - } - - // Build complete MySQL query with all filters - EXACT PYTHON MATCH - fn build_complete_mysql_query(&self, base_query: String, params: Vec, config: &PlaylistConfig, playlist_id: i32) -> AppResult<(String, Vec)> { - // Build proper SELECT query first - need to include columns for ordering in subquery - let mut select_columns = vec![ - "e.EpisodeID".to_string(), - "p.PodcastID".to_string(), - "e.EpisodePubDate".to_string(), - "e.EpisodeDuration".to_string(), - "COALESCE(h.ListenDuration, 0) as ListenDuration".to_string() - ]; - - let mut select_query = base_query.replace( - "SELECT e.EpisodeID, p.PodcastID, u.TimeZone", - &format!("SELECT {}", select_columns.join(", ")) - ); - - let mut all_params = params; - - // Add podcast filter (MySQL JSON/IN support) - if let Some(ref podcast_ids) = config.podcast_ids { - if !podcast_ids.is_empty() { - if podcast_ids.len() == 1 { - select_query.push_str(" AND p.PodcastID = ?"); - all_params.push(podcast_ids[0]); - } else { - let placeholders: String = podcast_ids.iter().map(|_| "?").collect::>().join(","); - select_query.push_str(&format!(" AND p.PodcastID IN ({})", 
placeholders)); - all_params.extend(podcast_ids); + // Get podcast details for feed metadata - exact Python logic + let (podcast_name, feed_image, feed_description) = if podcast_filter { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT podcastname, artworkurl, description FROM "Podcasts" WHERE podcastid = ANY($1)"#) + .bind(&effective_podcast_ids) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + ( + row.try_get::("podcastname").unwrap_or_else(|_| "Unknown Podcast".to_string()), + row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_else(|| format!("{}/static/assets/favicon.png", domain)), + row.try_get::("description").unwrap_or_else(|_| "No description available".to_string()), + ) + } else { + ("Unknown Podcast".to_string(), format!("{}/static/assets/favicon.png", domain), "No description available".to_string()) + } + } + DatabasePool::MySQL(pool) => { + if effective_podcast_ids.len() == 1 { + let row = sqlx::query("SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID = ?") + .bind(effective_podcast_ids[0]) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + ( + row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), + row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_else(|| format!("{}/static/assets/favicon.png", domain)), + row.try_get::("Description").unwrap_or_else(|_| "No description available".to_string()), + ) + } else { + ("Unknown Podcast".to_string(), format!("{}/static/assets/favicon.png", domain), "No description available".to_string()) + } + } else { + let placeholders = vec!["?"; effective_podcast_ids.len()].join(","); + let query_str = format!("SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID IN ({})", placeholders); + let mut query = sqlx::query(&query_str); + for &id in &effective_podcast_ids { + query = query.bind(id); + } + let row = query.fetch_optional(pool).await?; + + if let Some(row) 
= row { + ( + row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), + row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_else(|| format!("{}/static/assets/favicon.png", domain)), + row.try_get::("Description").unwrap_or_else(|_| "No description available".to_string()), + ) + } else { + ("Unknown Podcast".to_string(), format!("{}/static/assets/favicon.png", domain), "No description available".to_string()) + } + } } } - } + } else { + ("All Podcasts".to_string(), format!("{}/static/assets/favicon.png", domain), "RSS feed for all podcasts from Pinepods".to_string()) + }; + + // Build RSS feed using quick-xml for proper XML escaping + use quick_xml::events::{Event, BytesStart, BytesEnd, BytesText, BytesCData}; + use quick_xml::Writer; + use std::io::Cursor; + + let mut writer = Writer::new(Cursor::new(Vec::new())); - // Add duration filters - if let Some(min_duration) = config.min_duration { - select_query.push_str(" AND e.EpisodeDuration >= ?"); - all_params.push(min_duration); - } + // XML declaration + writer.write_event(Event::Decl(quick_xml::events::BytesDecl::new("1.0", Some("UTF-8"), None)))?; - if let Some(max_duration) = config.max_duration { - select_query.push_str(" AND e.EpisodeDuration <= ?"); - all_params.push(max_duration); - } + // RSS root element with namespace + let mut rss_elem = BytesStart::new("rss"); + rss_elem.push_attribute(("version", "2.0")); + rss_elem.push_attribute(("xmlns:itunes", "http://www.itunes.com/dtds/podcast-1.0.dtd")); + writer.write_event(Event::Start(rss_elem))?; - // Add time filter with timezone awareness - if let Some(time_filter_hours) = config.time_filter_hours { - select_query.push_str(&format!( - " AND CONVERT_TZ(e.EpisodePubDate, 'UTC', COALESCE(u.TimeZone, 'UTC')) > \ - DATE_SUB(CONVERT_TZ(NOW(), 'UTC', COALESCE(u.TimeZone, 'UTC')), INTERVAL {} HOUR)", - time_filter_hours - )); - } + // Channel + writer.write_event(Event::Start(BytesStart::new("channel")))?; - // Add play state 
filters - EXACT PYTHON LOGIC - let mut play_state_conditions = Vec::new(); + // Channel metadata + writer.write_event(Event::Start(BytesStart::new("title")))?; + writer.write_event(Event::Text(BytesText::new(&format!("Pinepods - {}", podcast_name))))?; + writer.write_event(Event::End(BytesEnd::new("title")))?; - if config.include_unplayed { - play_state_conditions.push("h.ListenDuration IS NULL".to_string()); - } + writer.write_event(Event::Start(BytesStart::new("link")))?; + writer.write_event(Event::Text(BytesText::new("https://github.com/madeofpendletonwool/pinepods")))?; + writer.write_event(Event::End(BytesEnd::new("link")))?; - if config.include_partially_played { - let mut partial_condition = "(h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration AND e.Completed = FALSE)".to_string(); - - if let Some(min_progress) = config.play_progress_min { - partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) >= {}", min_progress / 100.0)); - } - - if let Some(max_progress) = config.play_progress_max { - partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) <= {}", max_progress / 100.0)); - } - - play_state_conditions.push(partial_condition); - } + writer.write_event(Event::Start(BytesStart::new("description")))?; + writer.write_event(Event::Text(BytesText::new(&feed_description)))?; + writer.write_event(Event::End(BytesEnd::new("description")))?; - if config.include_played { - play_state_conditions.push("h.ListenDuration >= e.EpisodeDuration".to_string()); - } + writer.write_event(Event::Start(BytesStart::new("language")))?; + writer.write_event(Event::Text(BytesText::new("en")))?; + writer.write_event(Event::End(BytesEnd::new("language")))?; - if !play_state_conditions.is_empty() { - select_query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); - } + writer.write_event(Event::Start(BytesStart::new("itunes:author")))?; + 
writer.write_event(Event::Text(BytesText::new(&username)))?; + writer.write_event(Event::End(BytesEnd::new("itunes:author")))?; - // Add sorting and grouping - EXACT PYTHON LOGIC - let sort_order = config.get_mysql_sort_order(); - if config.group_by_podcast { - select_query.push_str(&format!(" ORDER BY p.PodcastID, {}", sort_order.replace("ORDER BY ", ""))); - } else { - select_query.push_str(&format!(" {}", sort_order)); - } + // iTunes image + let mut itunes_image = BytesStart::new("itunes:image"); + itunes_image.push_attribute(("href", feed_image.as_str())); + writer.write_event(Event::Empty(itunes_image))?; - // Add limit - if let Some(max_episodes) = config.max_episodes { - select_query.push_str(&format!(" LIMIT {}", max_episodes)); - } + // RSS image block + writer.write_event(Event::Start(BytesStart::new("image")))?; - // Now wrap the SELECT query in INSERT with ROW_NUMBER() - FIXED ALIAS SCOPING - let sort_for_insert = if config.group_by_podcast { - format!("ORDER BY episodes.PodcastID, {}", config.get_mysql_outer_sort_order().replace("ORDER BY ", "")) - } else { - config.get_mysql_outer_sort_order() - }; + writer.write_event(Event::Start(BytesStart::new("url")))?; + writer.write_event(Event::Text(BytesText::new(&feed_image)))?; + writer.write_event(Event::End(BytesEnd::new("url")))?; - let insert_query = format!(r#" - INSERT INTO PlaylistContents (PlaylistID, EpisodeID, Position) - SELECT ?, episodes.EpisodeID, ROW_NUMBER() OVER ({}) as position - FROM ({}) episodes - "#, sort_for_insert, select_query); + writer.write_event(Event::Start(BytesStart::new("title")))?; + writer.write_event(Event::Text(BytesText::new(&format!("Pinepods - {}", podcast_name))))?; + writer.write_event(Event::End(BytesEnd::new("title")))?; - // Final params: playlist_id first, then all query params - let mut final_params = vec![playlist_id]; - final_params.extend(all_params); + writer.write_event(Event::Start(BytesStart::new("link")))?; + 
writer.write_event(Event::Text(BytesText::new("https://github.com/madeofpendletonwool/pinepods")))?; + writer.write_event(Event::End(BytesEnd::new("link")))?; - Ok((insert_query, final_params)) - } - - // Add play state filters for PostgreSQL - fn add_play_state_filters_postgres(&self, query: &mut String, params: &mut Vec, param_index: &mut usize, config: &PlaylistConfig) -> AppResult<()> { - let mut play_state_conditions = Vec::new(); + writer.write_event(Event::End(BytesEnd::new("image")))?; - if config.include_unplayed { - play_state_conditions.push("h.listenduration IS NULL".to_string()); - } + // TTL + writer.write_event(Event::Start(BytesStart::new("ttl")))?; + writer.write_event(Event::Text(BytesText::new("60")))?; + writer.write_event(Event::End(BytesEnd::new("ttl")))?; - if config.include_partially_played { - let mut partial_condition = "(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.completed = FALSE)".to_string(); + // Get or create RSS key for this user to use in stream URLs + let user_rss_key = self.get_or_create_user_rss_key(user_id).await?; + + // Get episodes (use the user's RSS key for stream URLs, not the requesting key) + let episodes = self.get_rss_episodes(user_id, limit, source_type, &effective_podcast_ids, podcast_filter, domain, &user_rss_key).await?; + + // Write episodes + for episode in episodes { + writer.write_event(Event::Start(BytesStart::new("item")))?; - if let Some(min_progress) = config.play_progress_min { - partial_condition.push_str(&format!(" AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {}", min_progress / 100.0)); - } + // Title (using CDATA for safety) + writer.write_event(Event::Start(BytesStart::new("title")))?; + writer.write_event(Event::CData(BytesCData::new(&episode.title)))?; + writer.write_event(Event::End(BytesEnd::new("title")))?; - if let Some(max_progress) = config.play_progress_max { - partial_condition.push_str(&format!(" AND (h.listenduration::float / 
NULLIF(e.episodeduration, 0)) <= {}", max_progress / 100.0)); - } + // Link (URL will be properly escaped) + writer.write_event(Event::Start(BytesStart::new("link")))?; + writer.write_event(Event::Text(BytesText::new(&episode.url)))?; + writer.write_event(Event::End(BytesEnd::new("link")))?; - play_state_conditions.push(partial_condition); - } - - if config.include_played { - play_state_conditions.push("h.listenduration >= e.episodeduration".to_string()); - } - - if !play_state_conditions.is_empty() { - query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); - } - - Ok(()) - } - - // Add play state filters for MySQL - fn add_play_state_filters_mysql(&self, query: &mut String, config: &PlaylistConfig) -> AppResult<()> { - let mut play_state_conditions = Vec::new(); - - if config.include_unplayed { - play_state_conditions.push("h.ListenDuration IS NULL".to_string()); - } - - if config.include_partially_played { - let mut partial_condition = "(h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration AND e.Completed = FALSE)".to_string(); + // Description (using CDATA for safety) + writer.write_event(Event::Start(BytesStart::new("description")))?; + writer.write_event(Event::CData(BytesCData::new(&episode.description)))?; + writer.write_event(Event::End(BytesEnd::new("description")))?; - if let Some(min_progress) = config.play_progress_min { - partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) >= {}", min_progress / 100.0)); + // GUID + writer.write_event(Event::Start(BytesStart::new("guid")))?; + writer.write_event(Event::Text(BytesText::new(&episode.url)))?; + writer.write_event(Event::End(BytesEnd::new("guid")))?; + + // Pub date + writer.write_event(Event::Start(BytesStart::new("pubDate")))?; + writer.write_event(Event::Text(BytesText::new(&episode.pub_date)))?; + writer.write_event(Event::End(BytesEnd::new("pubDate")))?; + + // Author (if present) + if let Some(ref author) = episode.author { + 
writer.write_event(Event::Start(BytesStart::new("itunes:author")))?; + writer.write_event(Event::Text(BytesText::new(author)))?; + writer.write_event(Event::End(BytesEnd::new("itunes:author")))?; } - if let Some(max_progress) = config.play_progress_max { - partial_condition.push_str(&format!(" AND (h.ListenDuration / NULLIF(e.EpisodeDuration, 0)) <= {}", max_progress / 100.0)); + // Artwork (if present) + if let Some(ref artwork_url) = episode.artwork_url { + let mut itunes_img = BytesStart::new("itunes:image"); + itunes_img.push_attribute(("href", artwork_url.as_str())); + writer.write_event(Event::Empty(itunes_img))?; } - play_state_conditions.push(partial_condition); + // Duration (iTunes format: HH:MM:SS or MM:SS) + if let Some(duration_seconds) = episode.duration { + let hours = duration_seconds / 3600; + let minutes = (duration_seconds % 3600) / 60; + let seconds = duration_seconds % 60; + + let duration_str = if hours > 0 { + format!("{:02}:{:02}:{:02}", hours, minutes, seconds) + } else { + format!("{:02}:{:02}", minutes, seconds) + }; + + writer.write_event(Event::Start(BytesStart::new("itunes:duration")))?; + writer.write_event(Event::Text(BytesText::new(&duration_str)))?; + writer.write_event(Event::End(BytesEnd::new("itunes:duration")))?; + } + + // Enclosure (length should be file size in bytes, using duration as placeholder) + let mut enclosure = BytesStart::new("enclosure"); + enclosure.push_attribute(("url", episode.url.as_str())); + enclosure.push_attribute(("length", episode.duration.unwrap_or(0).to_string().as_str())); + enclosure.push_attribute(("type", "audio/mpeg")); + writer.write_event(Event::Empty(enclosure))?; + + writer.write_event(Event::End(BytesEnd::new("item")))?; } - if config.include_played { - play_state_conditions.push("h.ListenDuration >= e.EpisodeDuration".to_string()); - } + // Close channel and RSS + writer.write_event(Event::End(BytesEnd::new("channel")))?; + writer.write_event(Event::End(BytesEnd::new("rss")))?; - if 
!play_state_conditions.is_empty() { - query.push_str(&format!(" AND ({})", play_state_conditions.join(" OR "))); - } + // Convert to string + let result = writer.into_inner().into_inner(); + let rss_content = String::from_utf8(result) + .map_err(|e| AppError::internal(&format!("Failed to convert RSS to UTF-8: {}", e)))?; - Ok(()) - } - - // Add common filters for PostgreSQL (simplified helper) - fn add_common_filters_postgres(&self, _query: &mut String, _params: &mut Vec + Send + 'static>>, _param_index: &mut usize, _config: &PlaylistConfig, _user_id: i32) -> AppResult<()> { - // Implementation for duration, time, podcast filters - // Simplified for now due to complexity of dynamic parameters - Ok(()) + Ok(rss_content) } - - // Helper function to parse categories JSON string into HashMap - matches Python version - fn parse_categories_json(&self, categories_str: &str) -> Option> { - if categories_str.is_empty() { - return Some(std::collections::HashMap::new()); - } - - if categories_str.starts_with('{') { - // Try to parse as JSON first - if let Ok(parsed) = serde_json::from_str::>(categories_str) { - return Some(parsed); + + // Get user RSS key - matches Python get_user_rss_key function + pub async fn get_user_rss_key(&self, user_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT rsskey FROM "RssKeys" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + Ok(row.try_get("rsskey").ok()) + } else { + Ok(None) + } } - } else { - // Fall back to comma-separated parsing like Python version - let mut result = std::collections::HashMap::new(); - for (i, cat) in categories_str.split(',').enumerate() { - result.insert(i.to_string(), cat.trim().to_string()); + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT RssKey FROM RssKeys WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + 
Ok(row.try_get("RssKey").ok()) + } else { + Ok(None) + } } - return Some(result); - } - - // Return empty map if parsing fails - Some(std::collections::HashMap::new()) - } - - // Execute the final playlist query for PostgreSQL - FIXED VERSION - async fn execute_playlist_query_postgres(&self, pool: &Pool, query: &str, params: &[i32], _playlist_id: i32) -> AppResult { - tracing::info!("Executing PostgreSQL playlist query with {} parameters", params.len()); - tracing::debug!("Query: {}", query); - tracing::debug!("Params: {:?}", params); - - // Build query with proper parameter binding - let mut sqlx_query = sqlx::query(query); - for param in params { - sqlx_query = sqlx_query.bind(*param); } - - let result = sqlx_query.execute(pool).await?; - Ok(result.rows_affected() as i32) - } - - // Execute the final playlist query for MySQL - async fn execute_playlist_query_mysql(&self, pool: &Pool, query: &str, params: &[i32], _playlist_id: i32) -> AppResult { - // Simplified execution - full implementation would handle dynamic parameters properly - let result = sqlx::query(query) - .bind(params.get(0).unwrap_or(&0)) - .execute(pool) - .await?; - - Ok(result.rows_affected() as i32) } - - // Get podcast details - matches Python get_podcast_details function exactly - pub async fn get_podcast_details(&self, user_id: i32, podcast_id: i32) -> AppResult { + + // Get or create RSS key for user - ensures user always has an RSS key for stream URLs + pub async fn get_or_create_user_rss_key(&self, user_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - // First try to get podcast for specific user - let mut podcast_row = sqlx::query(r#" - SELECT podcastid, podcastname, feedurl, description, author, artworkurl, - explicit, episodecount, categories, websiteurl, podcastindexid, isyoutubechannel, - userid, autodownload, startskip, endskip, username, password, notificationsenabled, feedcutoffdays, - playbackspeed, playbackspeedcustomized - FROM "Podcasts" - WHERE 
podcastid = $1 AND userid = $2 - "#) - .bind(podcast_id) + // Try to get existing RSS key + let existing_key = sqlx::query(r#"SELECT rsskey FROM "RssKeys" WHERE userid = $1 LIMIT 1"#) .bind(user_id) .fetch_optional(pool) .await?; - - // Fallback to admin/public podcasts (userid = 1) if not found for user - if podcast_row.is_none() { - podcast_row = sqlx::query(r#" - SELECT podcastid, podcastname, feedurl, description, author, artworkurl, - explicit, episodecount, categories, websiteurl, podcastindexid, isyoutubechannel, - userid, autodownload, startskip, endskip, username, password, notificationsenabled, feedcutoffdays, - playbackspeed, playbackspeedcustomized - FROM "Podcasts" - WHERE podcastid = $1 AND userid = 1 - "#) - .bind(podcast_id) - .fetch_optional(pool) + + if let Some(row) = existing_key { + let key: String = row.try_get("rsskey")?; + Ok(key) + } else { + // Create new RSS key + let new_key = uuid::Uuid::new_v4().to_string(); + sqlx::query(r#"INSERT INTO "RssKeys" (userid, rsskey) VALUES ($1, $2)"#) + .bind(user_id) + .bind(&new_key) + .execute(pool) .await?; + Ok(new_key) } - - let row = podcast_row.ok_or_else(|| AppError::not_found("Podcast not found"))?; - - // Get episode count (special handling for YouTube channels) - let is_youtube: bool = row.try_get("isyoutubechannel").unwrap_or(false); - let episode_count = if is_youtube { - // Get count from YouTubeVideos table for YouTube channels - let count_row = sqlx::query(r#"SELECT COUNT(*) as count FROM "YouTubeVideos" WHERE podcastid = $1"#) - .bind(podcast_id) - .fetch_one(pool) - .await?; - count_row.try_get::("count")? 
as i32 - } else { - row.try_get("episodecount").unwrap_or(0) - }; - - // Get categories and parse to HashMap - matches Python version exactly - let categories_str = row.try_get::("categories").unwrap_or_else(|_| String::new()); - let categories = self.parse_categories_json(&categories_str); - - Ok(serde_json::json!({ - "podcastid": row.try_get::("podcastid")?, - "podcastindexid": row.try_get::("podcastindexid").unwrap_or(0), - "podcastname": row.try_get::("podcastname").unwrap_or_else(|_| "Unknown Podcast".to_string()), - "artworkurl": row.try_get::("artworkurl").unwrap_or_else(|_| String::new()), - "author": row.try_get::("author").unwrap_or_else(|_| "Unknown Author".to_string()), - "categories": categories, - "description": row.try_get::("description").unwrap_or_else(|_| String::new()), - "episodecount": episode_count, - "feedurl": row.try_get::("feedurl").unwrap_or_else(|_| String::new()), - "websiteurl": row.try_get::("websiteurl").unwrap_or_else(|_| String::new()), - "explicit": row.try_get::("explicit").unwrap_or(false), - "userid": row.try_get::("userid")?, - "autodownload": row.try_get::("autodownload").unwrap_or(false), - "startskip": row.try_get::("startskip").unwrap_or(0), - "endskip": row.try_get::("endskip").unwrap_or(0), - "username": row.try_get::, _>("username")?, - "password": row.try_get::, _>("password")?, - "isyoutubechannel": is_youtube, - "notificationsenabled": row.try_get::("notificationsenabled").unwrap_or(false), - "feedcutoffdays": row.try_get::("feedcutoffdays").unwrap_or(0), - "playbackspeedcustomized": row.try_get::("playbackspeedcustomized").unwrap_or(false), - "playbackspeed": row.try_get::("playbackspeed").unwrap_or(1.0) - })) } DatabasePool::MySQL(pool) => { - // First try to get podcast for specific user - let mut podcast_row = sqlx::query(r#" - SELECT PodcastID, PodcastName, FeedURL, Description, Author, ArtworkURL, - Explicit, EpisodeCount, Categories, WebsiteURL, PodcastIndexID, IsYouTubeChannel, - UserID, AutoDownload, 
StartSkip, EndSkip, Username, Password, NotificationsEnabled, FeedCutoffDays, - PlaybackSpeed, PlaybackSpeedCustomized - FROM Podcasts - WHERE PodcastID = ? AND UserID = ? - "#) - .bind(podcast_id) + // Try to get existing RSS key + let existing_key = sqlx::query("SELECT RssKey FROM RssKeys WHERE UserID = ? LIMIT 1") .bind(user_id) .fetch_optional(pool) .await?; - - // Fallback to admin/public podcasts (UserID = 1) if not found for user - if podcast_row.is_none() { - podcast_row = sqlx::query(r#" - SELECT PodcastID, PodcastName, FeedURL, Description, Author, ArtworkURL, - Explicit, EpisodeCount, Categories, WebsiteURL, PodcastIndexID, IsYouTubeChannel, - UserID, AutoDownload, StartSkip, EndSkip, Username, Password, NotificationsEnabled, FeedCutoffDays, - PlaybackSpeed, PlaybackSpeedCustomized - FROM Podcasts - WHERE PodcastID = ? AND UserID = 1 - "#) - .bind(podcast_id) - .fetch_optional(pool) + + if let Some(row) = existing_key { + let key: String = row.try_get("RssKey")?; + Ok(key) + } else { + // Create new RSS key + let new_key = uuid::Uuid::new_v4().to_string(); + sqlx::query("INSERT INTO RssKeys (UserID, RssKey) VALUES (?, ?)") + .bind(user_id) + .bind(&new_key) + .execute(pool) .await?; + Ok(new_key) } + } + } + } + + // Helper function to get RSS episodes + async fn get_rss_episodes( + &self, + user_id: i32, + limit: i32, + source_type: Option<&str>, + podcast_ids: &[i32], + podcast_filter: bool, + domain: &str, + api_key: &str, + ) -> AppResult> { + use chrono::{DateTime, Utc}; + + match self { + DatabasePool::Postgres(pool) => { + let mut base_query = r#" + SELECT + e.episodeid, + e.podcastid, + e.episodetitle, + e.episodedescription, + CASE WHEN de.episodeid IS NULL + THEN e.episodeurl + ELSE CONCAT(CAST($1 AS TEXT), '/api/data/stream/', e.episodeid, '?api_key=', CAST($2 AS TEXT), '&user_id=', pp.userid) + END as episodeurl, + e.episodeartwork, + e.episodepubdate, + e.episodeduration, + pp.podcastname, + pp.author, + pp.artworkurl, + pp.description as 
podcastdescription + FROM "Episodes" e + JOIN "Podcasts" pp ON e.podcastid = pp.podcastid + LEFT JOIN "DownloadedEpisodes" de ON e.episodeid = de.episodeid + WHERE pp.userid = $3 + "#.to_string(); - let row = podcast_row.ok_or_else(|| AppError::not_found("Podcast not found"))?; + let mut param_count = 3; + if podcast_filter && !podcast_ids.is_empty() { + param_count += 1; + base_query.push_str(&format!(" AND pp.podcastid = ANY(${})", param_count)); + } - // Get episode count (special handling for YouTube channels) - let is_youtube: bool = row.try_get("IsYouTubeChannel").unwrap_or(false); - let episode_count = if is_youtube { - // Get count from YouTubeVideos table for YouTube channels - let count_row = sqlx::query("SELECT COUNT(*) as count FROM YouTubeVideos WHERE PodcastID = ?") - .bind(podcast_id) - .fetch_one(pool) - .await?; - count_row.try_get::("count")? as i32 - } else { - row.try_get("EpisodeCount").unwrap_or(0) - }; + // Add YouTube union if needed (exact Python logic) + let add_youtube_union = source_type.is_none() || source_type == Some("youtube"); + if add_youtube_union { + base_query.push_str(r#" + UNION ALL + SELECT + y.videoid as episodeid, + y.podcastid, + y.videotitle as episodetitle, + y.videodescription as episodedescription, + CONCAT(CAST($1 AS TEXT), '/api/data/stream/', CAST(y.videoid AS TEXT), '?api_key=', CAST($2 AS TEXT), '&type=youtube&user_id=', pv.userid) as episodeurl, + y.thumbnailurl as episodeartwork, + y.publishedat as episodepubdate, + y.duration as episodeduration, + pv.podcastname, + pv.author, + pv.artworkurl, + pv.description as podcastdescription + FROM "YouTubeVideos" y + JOIN "Podcasts" pv on y.podcastid = pv.podcastid + WHERE pv.userid = $3 + "#); - // Get categories and parse to HashMap - matches Python version exactly - let categories_str = row.try_get::("Categories").unwrap_or_else(|_| String::new()); - let categories = self.parse_categories_json(&categories_str); + if podcast_filter && !podcast_ids.is_empty() { + 
base_query.push_str(&format!(" AND pv.podcastid = ANY(${})", param_count)); + } + } - Ok(serde_json::json!({ - "podcastid": row.try_get::("PodcastID")?, - "podcastindexid": row.try_get::("PodcastIndexID").unwrap_or(0), - "podcastname": row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), - "artworkurl": row.try_get::("ArtworkURL").unwrap_or_else(|_| String::new()), - "author": row.try_get::("Author").unwrap_or_else(|_| "Unknown Author".to_string()), - "categories": categories, - "description": row.try_get::("Description").unwrap_or_else(|_| String::new()), - "episodecount": episode_count, - "feedurl": row.try_get::("FeedURL").unwrap_or_else(|_| String::new()), - "websiteurl": row.try_get::("WebsiteURL").unwrap_or_else(|_| String::new()), - "explicit": row.try_get::("Explicit").unwrap_or(0) != 0, - "userid": row.try_get::("UserID")?, - "autodownload": row.try_get::("AutoDownload").unwrap_or(0) != 0, - "startskip": row.try_get::("StartSkip").unwrap_or(0), - "endskip": row.try_get::("EndSkip").unwrap_or(0), - "username": row.try_get::, _>("Username")?, - "password": row.try_get::, _>("Password")?, - "isyoutubechannel": is_youtube, - "notificationsenabled": row.try_get::("NotificationsEnabled").unwrap_or(0) != 0, - "feedcutoffdays": row.try_get::("FeedCutoffDays").unwrap_or(0), - "playbackspeedcustomized": row.try_get::("PlaybackSpeedCustomized").unwrap_or(0) != 0, - "playbackspeed": row.try_get::("PlaybackSpeed").unwrap_or(1.0) - })) - } - } - } + base_query.push_str(" ORDER BY episodepubdate DESC"); + if limit > 0 { + base_query.push_str(&format!(" LIMIT {}", limit)); + } - // OIDC Provider Management - matches Python OIDC functions + // Execute query + let mut query = sqlx::query(&base_query) + .bind(domain) + .bind(api_key) + .bind(user_id); - // Get OIDC provider by client ID - for callback processing - pub async fn get_oidc_provider_by_client_id(&self, client_id: &str) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - 
let row = sqlx::query(r#" - SELECT providerid, providername, clientid, clientsecret, authorizationurl, - tokenurl, userinfourl, scope, buttoncolor, buttontext, buttontextcolor, - iconsvg, enabled, nameclaim, emailclaim, usernameclaim, rolesclaim, - userrole, adminrole - FROM "OIDCProviders" - WHERE clientid = $1 AND enabled = true - "#) - .bind(client_id) - .fetch_optional(pool) - .await?; + if podcast_filter && !podcast_ids.is_empty() { + query = query.bind(podcast_ids); + } - if let Some(row) = row { - Ok(Some(serde_json::json!({ - "provider_id": row.try_get::("providerid")?, - "provider_name": row.try_get::("providername")?, - "client_id": row.try_get::("clientid")?, - "client_secret": row.try_get::("clientsecret")?, - "authorization_url": row.try_get::("authorizationurl")?, - "token_url": row.try_get::("tokenurl")?, - "userinfo_url": row.try_get::("userinfourl")?, - "scope": row.try_get::("scope")?, - "button_color": row.try_get::("buttoncolor")?, - "button_text": row.try_get::("buttontext")?, - "button_text_color": row.try_get::("buttontextcolor")?, - "icon_svg": row.try_get::, _>("iconsvg")?, - "enabled": row.try_get::("enabled")?, - "name_claim": row.try_get::, _>("nameclaim")?, - "email_claim": row.try_get::, _>("emailclaim")?, - "username_claim": row.try_get::, _>("usernameclaim")?, - "roles_claim": row.try_get::, _>("rolesclaim")?, - "user_role": row.try_get::, _>("userrole")?, - "admin_role": row.try_get::, _>("adminrole")? 
- }))) - } else { - Ok(None) + let rows = query.fetch_all(pool).await?; + + let mut episodes = Vec::new(); + for row in rows { + let title: String = row.try_get("episodetitle").unwrap_or_else(|_| "Untitled Episode".to_string()); + let description: String = row.try_get("episodedescription").unwrap_or_else(|_| String::new()); + let url: String = row.try_get("episodeurl").unwrap_or_else(|_| String::new()); + let duration: Option = row.try_get("episodeduration").ok(); + let author: Option = row.try_get("author").ok(); + // Use episode-specific artwork if available, otherwise fall back to podcast artwork + let episode_artwork: Option = row.try_get("episodeartwork").ok(); + let podcast_artwork: Option = row.try_get("artworkurl").ok(); + let artwork_url = episode_artwork.filter(|url| !url.is_empty()).or(podcast_artwork); + + let pub_date = if let Ok(dt) = row.try_get::, _>("episodepubdate") { + dt.format("%a, %d %b %Y %H:%M:%S %z").to_string() + } else { + Utc::now().format("%a, %d %b %Y %H:%M:%S %z").to_string() + }; + + episodes.push(RssEpisode { + title, + description, + url, + pub_date, + duration, + author, + artwork_url, + }); } + + Ok(episodes) } DatabasePool::MySQL(pool) => { - let row = sqlx::query(r#" - SELECT ProviderID, ProviderName, ClientID, ClientSecret, AuthorizationURL, - TokenURL, UserInfoURL, Scope, ButtonColor, ButtonText, ButtonTextColor, - IconSVG, Enabled, NameClaim, EmailClaim, UsernameClaim, RolesClaim, - UserRole, AdminRole - FROM OIDCProviders - WHERE ClientID = ? AND Enabled = true - "#) - .bind(client_id) - .fetch_optional(pool) - .await?; + let mut base_query = r#" + SELECT + e.EpisodeID, + e.PodcastID, + e.EpisodeTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, + e.EpisodeDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, + CASE WHEN de.EpisodeID IS NULL + THEN e.EpisodeURL COLLATE utf8mb4_unicode_ci + ELSE CONCAT(CAST(? AS CHAR), '/api/data/stream/', CAST(e.EpisodeID AS CHAR), '?api_key=', CAST(? 
AS CHAR), '&user_id=', pp.UserID) + END COLLATE utf8mb4_unicode_ci as EpisodeURL, + e.EpisodeArtwork COLLATE utf8mb4_unicode_ci as EpisodeArtwork, + e.EpisodePubDate, + e.EpisodeDuration, + pp.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, + pp.Author COLLATE utf8mb4_unicode_ci as Author, + pp.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, + pp.Description COLLATE utf8mb4_unicode_ci as PodcastDescription + FROM Episodes e + JOIN Podcasts pp ON e.PodcastID = pp.PodcastID + LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID + WHERE pp.UserID = ? + "#.to_string(); - if let Some(row) = row { - Ok(Some(serde_json::json!({ - "provider_id": row.try_get::("ProviderID")?, - "provider_name": row.try_get::("ProviderName")?, - "client_id": row.try_get::("ClientID")?, - "client_secret": row.try_get::("ClientSecret")?, - "authorization_url": row.try_get::("AuthorizationURL")?, - "token_url": row.try_get::("TokenURL")?, - "userinfo_url": row.try_get::("UserInfoURL")?, - "scope": row.try_get::("Scope")?, - "button_color": row.try_get::("ButtonColor")?, - "button_text": row.try_get::("ButtonText")?, - "button_text_color": row.try_get::("ButtonTextColor")?, - "icon_svg": row.try_get::, _>("IconSVG")?, - "enabled": row.try_get::("Enabled")? != 0, - "name_claim": row.try_get::, _>("NameClaim")?, - "email_claim": row.try_get::, _>("EmailClaim")?, - "username_claim": row.try_get::, _>("UsernameClaim")?, - "roles_claim": row.try_get::, _>("RolesClaim")?, - "user_role": row.try_get::, _>("UserRole")?, - "admin_role": row.try_get::, _>("AdminRole")? 
- }))) - } else { - Ok(None) + if podcast_filter && !podcast_ids.is_empty() { + let placeholders = vec!["?"; podcast_ids.len()].join(","); + base_query.push_str(&format!(" AND pp.PodcastID IN ({})", placeholders)); } - } - } - } - // Get OIDC provider - matches Python get_oidc_provider function EXACTLY - pub async fn get_oidc_provider(&self, client_id: &str) -> AppResult, Option, Option, Option, Option, Option)>> { - match self { - DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#" - SELECT providerid, clientid, clientsecret, tokenurl, userinfourl, nameclaim, emailclaim, usernameclaim, rolesclaim, userrole, adminrole - FROM "OIDCProviders" - WHERE clientid = $1 AND enabled = true - "#) - .bind(client_id) - .fetch_optional(pool) - .await?; + // Add YouTube union if needed + let add_youtube_union = source_type.is_none() || source_type == Some("youtube"); + if add_youtube_union { + base_query.push_str(r#" + UNION ALL + SELECT + y.VideoID as EpisodeID, + y.PodcastID as PodcastID, + y.VideoTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, + y.VideoDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, + CONCAT(CAST(? AS CHAR), '/api/data/stream/', CAST(y.VideoID AS CHAR), '?api_key=', CAST(? AS CHAR), '&type=youtube&user_id=', pv.UserID) COLLATE utf8mb4_unicode_ci as EpisodeURL, + y.ThumbnailURL COLLATE utf8mb4_unicode_ci as EpisodeArtwork, + y.PublishedAt as EpisodePubDate, + y.Duration as EpisodeDuration, + pv.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, + pv.Author COLLATE utf8mb4_unicode_ci as Author, + pv.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, + pv.Description COLLATE utf8mb4_unicode_ci as PodcastDescription + FROM YouTubeVideos y + JOIN Podcasts pv on y.PodcastID = pv.PodcastID + WHERE pv.UserID = ? 
+ "#); - if let Some(row) = result { - Ok(Some(( - row.try_get("providerid")?, - row.try_get("clientid")?, - row.try_get("clientsecret")?, - row.try_get("tokenurl")?, - row.try_get("userinfourl")?, - row.try_get("nameclaim")?, - row.try_get("emailclaim")?, - row.try_get("usernameclaim")?, - row.try_get("rolesclaim")?, - row.try_get("userrole")?, - row.try_get("adminrole")?, - ))) - } else { - Ok(None) + if podcast_filter && !podcast_ids.is_empty() { + let placeholders = vec!["?"; podcast_ids.len()].join(","); + base_query.push_str(&format!(" AND pv.PodcastID IN ({})", placeholders)); + } } - }, - DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#" - SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole - FROM OIDCProviders - WHERE ClientID = ? AND Enabled = true - "#) - .bind(client_id) - .fetch_optional(pool) - .await?; - if let Some(row) = result { - Ok(Some(( - row.try_get("ProviderID")?, - row.try_get("ClientID")?, - row.try_get("ClientSecret")?, - row.try_get("TokenURL")?, - row.try_get("UserInfoURL")?, - row.try_get("NameClaim")?, - row.try_get("EmailClaim")?, - row.try_get("UsernameClaim")?, - row.try_get("RolesClaim")?, - row.try_get("UserRole")?, - row.try_get("AdminRole")?, - ))) - } else { - Ok(None) + base_query.push_str(" ORDER BY EpisodePubDate DESC"); + if limit > 0 { + base_query.push_str(&format!(" LIMIT {}", limit)); } - } - } - } - // Get user by email - matches Python get_user_by_email function EXACTLY - pub async fn get_user_by_email(&self, email: &str) -> AppResult, Option, bool)>> { - match self { - DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#" - SELECT userid, email, username, fullname, isadmin - FROM "Users" - WHERE email = $1 - "#) - .bind(email) - .fetch_optional(pool) - .await?; + // Build query with parameters + let mut query = sqlx::query(&base_query) + .bind(domain) + .bind(api_key) + .bind(user_id); - if let Some(row) = 
result { - Ok(Some(( - row.try_get("userid")?, - row.try_get("email")?, - row.try_get("username")?, - row.try_get("fullname")?, - row.try_get("isadmin")?, - ))) - } else { - Ok(None) + if podcast_filter && !podcast_ids.is_empty() { + for &id in podcast_ids { + query = query.bind(id); + } } - }, - DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#" - SELECT UserID, Email, Username, Fullname, IsAdmin - FROM Users - WHERE Email = ? - "#) - .bind(email) - .fetch_optional(pool) - .await?; - if let Some(row) = result { - let is_admin: i32 = row.try_get("IsAdmin")?; - Ok(Some(( - row.try_get("UserID")?, - row.try_get("Email")?, - row.try_get("Username")?, - row.try_get("Fullname")?, - is_admin != 0, - ))) - } else { - Ok(None) + if add_youtube_union { + query = query.bind(domain).bind(api_key).bind(user_id); + if podcast_filter && !podcast_ids.is_empty() { + for &id in podcast_ids { + query = query.bind(id); + } + } + } + + let rows = query.fetch_all(pool).await?; + + let mut episodes = Vec::new(); + for row in rows { + let title: String = row.try_get("EpisodeTitle").unwrap_or_else(|_| "Untitled Episode".to_string()); + let description: String = row.try_get("EpisodeDescription").unwrap_or_else(|_| String::new()); + let url: String = row.try_get("EpisodeURL").unwrap_or_else(|_| String::new()); + let duration: Option = row.try_get("EpisodeDuration").ok(); + let author: Option = row.try_get("Author").ok(); + // Use episode-specific artwork if available, otherwise fall back to podcast artwork + let episode_artwork: Option = row.try_get("EpisodeArtwork").ok(); + let podcast_artwork: Option = row.try_get("ArtworkURL").ok(); + let artwork_url = episode_artwork.filter(|url| !url.is_empty()).or(podcast_artwork); + + let pub_date = if let Ok(dt) = row.try_get::, _>("EpisodePubDate") { + dt.format("%a, %d %b %Y %H:%M:%S %z").to_string() + } else { + Utc::now().format("%a, %d %b %Y %H:%M:%S %z").to_string() + }; + + episodes.push(RssEpisode { + title, + description, + url, 
+ pub_date, + duration, + author, + artwork_url, + }); } + + Ok(episodes) } } } - // Check if username exists - matches Python check_usernames function EXACTLY - pub async fn check_usernames(&self, username: &str) -> AppResult { - match self { - DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"SELECT COUNT(*) as count FROM "Users" WHERE username = $1"#) - .bind(username) - .fetch_one(pool) - .await?; - let count: i64 = result.try_get("count")?; - Ok(count > 0) - }, - DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#"SELECT COUNT(*) as count FROM Users WHERE Username = ?"#) - .bind(username) - .fetch_one(pool) - .await?; - let count: i64 = result.try_get("count")?; - Ok(count > 0) - } - } - } - - // Get user API key - matches Python get_user_api_key function EXACTLY - pub async fn get_user_api_key(&self, user_id: i32) -> AppResult> { + // Get podcast notification status - matches Python get_podcast_notification_status function + pub async fn get_podcast_notification_status(&self, podcast_id: i32, user_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#" - SELECT api_key FROM "APIKeys" - WHERE userid = $1 - ORDER BY created DESC - LIMIT 1 + let row = sqlx::query(r#" + SELECT notificationsenabled + FROM "Podcasts" + WHERE podcastid = $1 AND userid = $2 "#) + .bind(podcast_id) .bind(user_id) .fetch_optional(pool) .await?; - - if let Some(row) = result { - Ok(Some(row.try_get("api_key")?)) + + if let Some(row) = row { + Ok(row.try_get("notificationsenabled").unwrap_or(false)) } else { - Ok(None) + Ok(false) } - }, + } DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#" - SELECT APIKey FROM APIKeys - WHERE UserID = ? - ORDER BY Created DESC - LIMIT 1 - "#) + let row = sqlx::query(" + SELECT NotificationsEnabled + FROM Podcasts + WHERE PodcastID = ? AND UserID = ? 
+ ") + .bind(podcast_id) .bind(user_id) .fetch_optional(pool) .await?; - - if let Some(row) = result { - Ok(Some(row.try_get("APIKey")?)) + + if let Some(row) = row { + let enabled: i8 = row.try_get("NotificationsEnabled").unwrap_or(0); + Ok(enabled != 0) } else { - Ok(None) + Ok(false) } } } } - // Create OIDC user - matches Python create_oidc_user function EXACTLY - pub async fn create_oidc_user(&self, email: &str, fullname: &str, username: &str) -> AppResult { - use base64::{Engine as _, engine::general_purpose::STANDARD}; - use rand::Rng; - - // Create salt exactly like Python version - let salt_bytes: [u8; 16] = rand::thread_rng().gen(); - let salt = STANDARD.encode(salt_bytes); - let hashed_password = format!("$argon2id$v=19$m=65536,t=3,p=4${}${}_OIDC_ACCOUNT_NO_PASSWORD", - salt, "X".repeat(43)); - + // Get MFA secret - matches Python get_mfa_secret function + pub async fn get_mfa_secret(&self, user_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#" - INSERT INTO "Users" (fullname, username, email, hashed_pw, isadmin) - VALUES ($1, $2, $3, $4, false) - RETURNING userid - "#) - .bind(fullname) - .bind(username) - .bind(email) - .bind(&hashed_password) - .fetch_one(pool) - .await?; - - let user_id: i32 = result.try_get("userid")?; - - // Add default user settings - sqlx::query(r#"INSERT INTO "UserSettings" (userid, theme) VALUES ($1, $2)"#) - .bind(user_id) - .bind("Nordic") - .execute(pool) - .await?; - - // Add default user stats - sqlx::query(r#"INSERT INTO "UserStats" (userid) VALUES ($1)"#) - .bind(user_id) - .execute(pool) - .await?; - - Ok(user_id) - }, - DatabasePool::MySQL(pool) => { - let result = sqlx::query(r#" - INSERT INTO Users (Fullname, Username, Email, Hashed_PW, IsAdmin) - VALUES (?, ?, ?, ?, 0) - "#) - .bind(fullname) - .bind(username) - .bind(email) - .bind(&hashed_password) - .execute(pool) - .await?; - - let user_id = result.last_insert_id() as i32; - - // Add default user settings - 
sqlx::query(r#"INSERT INTO UserSettings (UserID, Theme) VALUES (?, ?)"#) - .bind(user_id) - .bind("Nordic") - .execute(pool) - .await?; - - // Add default user stats - sqlx::query(r#"INSERT INTO UserStats (UserID) VALUES (?)"#) + let row = sqlx::query(r#"SELECT mfa_secret FROM "Users" WHERE userid = $1"#) .bind(user_id) - .execute(pool) - .await?; - - Ok(user_id) - } - } - } - - - - // Check if username exists - helper for OIDC user creation - pub async fn username_exists(&self, username: &str) -> AppResult { - match self { - DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"SELECT 1 FROM "Users" WHERE username = $1 LIMIT 1"#) - .bind(username) .fetch_optional(pool) .await?; - Ok(result.is_some()) + + if let Some(row) = row { + Ok(row.try_get("mfa_secret").ok()) + } else { + Ok(None) + } } DatabasePool::MySQL(pool) => { - let result = sqlx::query("SELECT 1 FROM Users WHERE Username = ? LIMIT 1") - .bind(username) + let row = sqlx::query("SELECT MFA_Secret FROM Users WHERE UserID = ?") + .bind(user_id) .fetch_optional(pool) .await?; - Ok(result.is_some()) + + if let Some(row) = row { + Ok(row.try_get("MFA_Secret").ok()) + } else { + Ok(None) + } } } } - // Create or get API key for user - for OIDC login completion - pub async fn create_or_get_api_key(&self, user_id: i32) -> AppResult { + // Return YouTube episodes - matches Python return_youtube_episodes function exactly + pub async fn return_youtube_episodes( + &self, + user_id: i32, + podcast_id: i32, + ) -> AppResult>> { match self { DatabasePool::Postgres(pool) => { - // Check for existing API key - let existing_key = sqlx::query(r#"SELECT apikey FROM "APIKeys" WHERE userid = $1 LIMIT 1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; + let rows = sqlx::query(r#" + SELECT "Podcasts".podcastid, "Podcasts".podcastname, "YouTubeVideos".videoid AS episodeid, + "YouTubeVideos".videotitle AS episodetitle, "YouTubeVideos".publishedat AS episodepubdate, + "YouTubeVideos".videodescription AS 
episodedescription, + "YouTubeVideos".thumbnailurl AS episodeartwork, "YouTubeVideos".videourl AS episodeurl, + "YouTubeVideos".duration AS episodeduration, + "YouTubeVideos".listenposition AS listenduration, + "YouTubeVideos".youtubevideoid AS guid + FROM "YouTubeVideos" + INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + WHERE "Podcasts".podcastid = $1 AND "Podcasts".userid = $2 + ORDER BY "YouTubeVideos".publishedat DESC + "#) + .bind(podcast_id) + .bind(user_id) + .fetch_all(pool) + .await?; - if let Some(row) = existing_key { - return Ok(row.try_get("apikey")?); + if rows.is_empty() { + return Ok(None); } - // Generate new API key - let api_key = format!("pk_{}", uuid::Uuid::new_v4().simple()); - - sqlx::query(r#"INSERT INTO "APIKeys" (userid, apikey) VALUES ($1, $2)"#) - .bind(user_id) - .bind(&api_key) - .execute(pool) - .await?; + let mut episodes = Vec::new(); + for row in rows { + let episode = serde_json::json!({ + "Podcastid": row.try_get::("podcastid").unwrap_or(0), + "Podcastname": row.try_get::("podcastname").unwrap_or_default(), + "Episodeid": row.try_get::("episodeid").unwrap_or(0), + "Episodetitle": row.try_get::("episodetitle").unwrap_or_default(), + "Episodepubdate": row.try_get::("episodepubdate") + .map(|dt| dt.and_utc().to_rfc3339()) + .unwrap_or_default(), + "Episodedescription": row.try_get::("episodedescription").unwrap_or_default(), + "Episodeartwork": row.try_get::("episodeartwork").unwrap_or_default(), + "Episodeurl": row.try_get::("episodeurl").unwrap_or_default(), + "Episodeduration": row.try_get::("episodeduration").unwrap_or(0), + "Listenduration": row.try_get::("listenduration").unwrap_or(0), + "Guid": row.try_get::("guid").unwrap_or_default() + }); + episodes.push(episode); + } - Ok(api_key) + Ok(Some(episodes)) } DatabasePool::MySQL(pool) => { - let existing_key = sqlx::query("SELECT APIKey FROM APIKeys WHERE UserID = ? 
LIMIT 1") - .bind(user_id) - .fetch_optional(pool) - .await?; + let rows = sqlx::query(r#" + SELECT Podcasts.PodcastID, Podcasts.PodcastName, YouTubeVideos.VideoID AS EpisodeID, + YouTubeVideos.VideoTitle AS EpisodeTitle, YouTubeVideos.PublishedAt AS EpisodePubDate, + YouTubeVideos.VideoDescription AS EpisodeDescription, + YouTubeVideos.ThumbnailURL AS EpisodeArtwork, YouTubeVideos.VideoURL AS EpisodeURL, + YouTubeVideos.Duration AS EpisodeDuration, + YouTubeVideos.ListenPosition AS ListenDuration, + YouTubeVideos.YouTubeVideoID AS guid + FROM YouTubeVideos + INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + WHERE Podcasts.PodcastID = ? AND Podcasts.UserID = ? + ORDER BY YouTubeVideos.PublishedAt DESC + "#) + .bind(podcast_id) + .bind(user_id) + .fetch_all(pool) + .await?; - if let Some(row) = existing_key { - return Ok(row.try_get("APIKey")?); + if rows.is_empty() { + return Ok(None); } - let api_key = format!("pk_{}", uuid::Uuid::new_v4().simple()); - - sqlx::query("INSERT INTO APIKeys (UserID, APIKey) VALUES (?, ?)") - .bind(user_id) - .bind(&api_key) - .execute(pool) - .await?; + let mut episodes = Vec::new(); + for row in rows { + let episode = serde_json::json!({ + "Podcastid": row.try_get::("PodcastID").unwrap_or(0), + "Podcastname": row.try_get::("PodcastName").unwrap_or_default(), + "Episodeid": row.try_get::("EpisodeID").unwrap_or(0), + "Episodetitle": row.try_get::("EpisodeTitle").unwrap_or_default(), + "Episodepubdate": row.try_get::("EpisodePubDate") + .map(|dt| dt.and_utc().to_rfc3339()) + .unwrap_or_default(), + "Episodedescription": row.try_get::("EpisodeDescription").unwrap_or_default(), + "Episodeartwork": row.try_get::("EpisodeArtwork").unwrap_or_default(), + "Episodeurl": row.try_get::("EpisodeURL").unwrap_or_default(), + "Episodeduration": row.try_get::("EpisodeDuration").unwrap_or(0), + "Listenduration": row.try_get::("ListenDuration").unwrap_or(0), + "Guid": row.try_get::("guid").unwrap_or_default() + }); + 
episodes.push(episode); + } - Ok(api_key) + Ok(Some(episodes)) } } } - // Get playlist episodes - matches Python get_playlist_episodes function exactly - pub async fn get_playlist_episodes(&self, user_id: i32, playlist_id: i32) -> AppResult { - match self { + // Remove YouTube channel by URL - matches Python remove_youtube_channel_by_url function exactly + pub async fn remove_youtube_channel_by_url( + &self, + channel_name: &str, + channel_url: &str, + user_id: i32, + ) -> AppResult<()> { + println!("got to remove youtube channel"); + + // Get the PodcastID first + let podcast_id = match self { DatabasePool::Postgres(pool) => { - // Get playlist info with episode count - matches Python exactly - let playlist_row = sqlx::query(r#" - SELECT - p.name, - p.description, - (SELECT COUNT(*) - FROM "PlaylistContents" pc - JOIN "Episodes" e ON pc.episodeid = e.episodeid - JOIN "Podcasts" pod ON e.podcastid = pod.podcastid - LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = $1 - WHERE pc.playlistid = p.playlistid - AND (p.issystemplaylist = FALSE OR - (p.issystemplaylist = TRUE AND - (h.episodeid IS NOT NULL OR pod.userid = $2))) - ) as episode_count, - p.iconname, - p.issystemplaylist - FROM "Playlists" p - WHERE p.playlistid = $3 AND (p.userid = $4 OR p.issystemplaylist = TRUE) + let row = sqlx::query(r#" + SELECT podcastid + FROM "Podcasts" + WHERE podcastname = $1 + AND feedurl = $2 + AND userid = $3 + AND isyoutubechannel = TRUE "#) - .bind(user_id) - .bind(user_id) - .bind(playlist_id) - .bind(user_id) - .fetch_optional(pool) - .await?; + .bind(channel_name) + .bind(channel_url) + .bind(user_id) + .fetch_optional(pool) + .await?; - if playlist_row.is_none() { - return Err(AppError::not_found("Playlist not found")); + if let Some(row) = row { + row.try_get::("podcastid")? 
+ } else { + return Err(AppError::external_error(&format!("No YouTube channel found with name {}", channel_name))); } - - let row = playlist_row.unwrap(); - let playlist_name: String = row.try_get("name")?; - let playlist_description: String = row.try_get("description").unwrap_or_default(); - let episode_count: i64 = row.try_get("episode_count")?; - let icon_name: String = row.try_get("iconname").unwrap_or_default(); - - // Get episodes in playlist - query PlaylistContents table joined with Episodes and Podcasts (matches Python) - let episodes_rows = sqlx::query(r#" - SELECT DISTINCT - "Episodes".episodeid, - "Episodes".episodetitle, - "Episodes".episodepubdate, - "Episodes".episodedescription, - "Episodes".episodeartwork, - "Episodes".episodeurl, - "Episodes".episodeduration, - "Episodes".completed, - "Podcasts".podcastname, - "Podcasts".podcastid, - "Podcasts".isyoutubechannel as is_youtube, - "UserEpisodeHistory".listenduration, - CASE WHEN "SavedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS saved, - CASE WHEN "EpisodeQueue".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS queued, - CASE WHEN "DownloadedEpisodes".episodeid IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded, - "PlaylistContents".dateadded - FROM "PlaylistContents" - JOIN "Episodes" ON "PlaylistContents".episodeid = "Episodes".episodeid - JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid - LEFT JOIN "UserEpisodeHistory" ON "Episodes".episodeid = "UserEpisodeHistory".episodeid - AND "UserEpisodeHistory".userid = $1 - LEFT JOIN "SavedEpisodes" ON "Episodes".episodeid = "SavedEpisodes".episodeid - AND "SavedEpisodes".userid = $1 - LEFT JOIN "EpisodeQueue" ON "Episodes".episodeid = "EpisodeQueue".episodeid - AND "EpisodeQueue".userid = $1 - AND "EpisodeQueue".is_youtube = FALSE - LEFT JOIN "DownloadedEpisodes" ON "Episodes".episodeid = "DownloadedEpisodes".episodeid - AND "DownloadedEpisodes".userid = $1 - WHERE "PlaylistContents".playlistid = $2 - ORDER BY 
"PlaylistContents".dateadded DESC + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query(r#" + SELECT PodcastID + FROM Podcasts + WHERE PodcastName = ? + AND FeedURL = ? + AND UserID = ? + AND IsYouTubeChannel = TRUE "#) - .bind(user_id) - .bind(playlist_id) - .fetch_all(pool) - .await?; - - let mut episodes = Vec::new(); - for row in episodes_rows { - let episodeid: i32 = row.try_get("episodeid")?; - let episodetitle: String = row.try_get("episodetitle")?; - let naive = row.try_get::("episodepubdate")?; - let episodepubdate = naive.format("%Y-%m-%dT%H:%M:%S").to_string(); - let episodedescription: String = row.try_get("episodedescription")?; - let episodeartwork: String = row.try_get("episodeartwork")?; - let episodeurl: String = row.try_get("episodeurl")?; - let episodeduration: i32 = row.try_get("episodeduration")?; - let completed: bool = row.try_get("completed")?; - let podcastname: String = row.try_get("podcastname")?; - let podcastid: i32 = row.try_get("podcastid")?; - let is_youtube: bool = row.try_get("is_youtube")?; - let listenduration: Option = row.try_get("listenduration")?; - let saved: bool = row.try_get("saved")?; - let queued: bool = row.try_get("queued")?; - let downloaded: bool = row.try_get("downloaded")?; - let dateadded_naive = row.try_get::("dateadded")?; - let dateadded = dateadded_naive.format("%Y-%m-%dT%H:%M:%S").to_string(); + .bind(channel_name) + .bind(channel_url) + .bind(user_id) + .fetch_optional(pool) + .await?; - episodes.push(serde_json::json!({ - "episodeid": episodeid, - "episodetitle": episodetitle, - "episodepubdate": episodepubdate, - "episodedescription": episodedescription, - "episodeartwork": episodeartwork, - "episodeurl": episodeurl, - "episodeduration": episodeduration, - "completed": completed, - "podcastname": podcastname, - "podcastid": podcastid, - "is_youtube": is_youtube, - "listenduration": listenduration, - "saved": saved, - "queued": queued, - "downloaded": downloaded, - "dateadded": dateadded - })); + if 
let Some(row) = row { + row.try_get::("PodcastID")? + } else { + return Err(AppError::external_error(&format!("No YouTube channel found with name {}", channel_name))); } - - // Build playlist_info structure matching Python exactly - let playlist_info = serde_json::json!({ - "name": playlist_name, - "description": playlist_description, - "episode_count": episode_count, - "icon_name": icon_name - }); - - Ok(serde_json::json!({ - "playlist_info": playlist_info, - "episodes": episodes - })) } - DatabasePool::MySQL(pool) => { - // Get playlist info with episode count - matches Python exactly - let playlist_row = sqlx::query( - "SELECT - p.Name, - p.Description, - (SELECT COUNT(*) - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - JOIN Podcasts pod ON e.PodcastID = pod.PodcastID - LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = ? - WHERE pc.PlaylistID = p.PlaylistID - AND (p.IsSystemPlaylist = 0 OR - (p.IsSystemPlaylist = 1 AND - (h.EpisodeID IS NOT NULL OR pod.UserID = ?))) - ) as episode_count, - p.IconName, - p.IsSystemPlaylist - FROM Playlists p - WHERE p.PlaylistID = ? AND (p.UserID = ? 
OR p.IsSystemPlaylist = 1)" - ) - .bind(user_id) - .bind(user_id) - .bind(playlist_id) - .bind(user_id) - .fetch_optional(pool) - .await?; + }; - if playlist_row.is_none() { - return Err(AppError::not_found("Playlist not found")); - } + // Remove the channel by ID + self.remove_youtube_channel_by_id(podcast_id, user_id).await + } - let row = playlist_row.unwrap(); - let playlist_name: String = row.try_get("Name")?; - let playlist_description: String = row.try_get("Description").unwrap_or_default(); - let episode_count: i64 = row.try_get("episode_count")?; - let icon_name: String = row.try_get("IconName").unwrap_or_default(); + // Remove YouTube channel by ID - matches Python remove_youtube_channel function exactly + pub async fn remove_youtube_channel_by_id( + &self, + podcast_id: i32, + user_id: i32, + ) -> AppResult<()> { + // First, get all video IDs for the podcast so we can delete the files + let video_ids: Vec = match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query(r#"SELECT youtubevideoid FROM "YouTubeVideos" WHERE podcastid = $1"#) + .bind(podcast_id) + .fetch_all(pool) + .await?; - // Get episodes in playlist - query PlaylistEpisodes table joined with Episodes and Podcasts - let episodes_rows = sqlx::query( - "SELECT DISTINCT - e.EpisodeID as episodeid, - e.EpisodeTitle as episodetitle, - e.EpisodePubDate as episodepubdate, - e.EpisodeDescription as episodedescription, - e.EpisodeArtwork as episodeartwork, - e.EpisodeURL as episodeurl, - e.EpisodeDuration as episodeduration, - e.Completed as completed, - p.PodcastName as podcastname, - p.PodcastID as podcastid, - p.IsYouTubeChannel as is_youtube, - ueh.ListenDuration as listenduration, - CASE WHEN se.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved, - CASE WHEN eq.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued, - CASE WHEN de.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded, - pc.DateAdded as addeddate - FROM PlaylistContents pc - JOIN Episodes e ON pc.EpisodeID = e.EpisodeID - 
JOIN Podcasts p ON e.PodcastID = p.PodcastID - LEFT JOIN UserEpisodeHistory ueh ON e.EpisodeID = ueh.EpisodeID AND ueh.UserID = ? - LEFT JOIN SavedEpisodes se ON e.EpisodeID = se.EpisodeID AND se.UserID = ? - LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = ? AND eq.is_youtube = 0 - LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID AND de.UserID = ? - WHERE pc.PlaylistID = ? - ORDER BY pc.DateAdded DESC" - ) - .bind(user_id) - .bind(user_id) - .bind(user_id) - .bind(user_id) - .bind(playlist_id) + rows.into_iter() + .map(|row| row.try_get::("youtubevideoid").unwrap_or_default()) + .collect() + } + DatabasePool::MySQL(pool) => { + let rows = sqlx::query("SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = ?") + .bind(podcast_id) .fetch_all(pool) .await?; - let mut episodes = Vec::new(); - for row in episodes_rows { - let episodeid: i32 = row.try_get("episodeid")?; - let episodetitle: String = row.try_get("episodetitle")?; - let naive = row.try_get::("episodepubdate")?; - let episodepubdate = naive.format("%Y-%m-%dT%H:%M:%S").to_string(); - let episodedescription: String = row.try_get("episodedescription")?; - let episodeartwork: String = row.try_get("episodeartwork")?; - let episodeurl: String = row.try_get("episodeurl")?; - let episodeduration: i32 = row.try_get("episodeduration")?; - let completed: bool = row.try_get::("completed")? != 0; - let podcastname: String = row.try_get("podcastname")?; - let podcastid: i32 = row.try_get("podcastid")?; - let is_youtube: bool = row.try_get::("is_youtube")? != 0; - let listenduration: Option = row.try_get("listenduration")?; - let saved: bool = row.try_get::("saved")? != 0; - let queued: bool = row.try_get::("queued")? != 0; - let downloaded: bool = row.try_get::("downloaded")? 
!= 0; - let addeddate_naive = row.try_get::("addeddate")?; - let addeddate = addeddate_naive.format("%Y-%m-%dT%H:%M:%S").to_string(); - - episodes.push(serde_json::json!({ - "episodeid": episodeid, - "episodetitle": episodetitle, - "episodepubdate": episodepubdate, - "episodedescription": episodedescription, - "episodeartwork": episodeartwork, - "episodeurl": episodeurl, - "episodeduration": episodeduration, - "completed": completed, - "podcastname": podcastname, - "podcastid": podcastid, - "is_youtube": is_youtube, - "listenduration": listenduration, - "saved": saved, - "queued": queued, - "downloaded": downloaded, - "dateadded": addeddate - })); - } + rows.into_iter() + .map(|row| row.try_get::("YouTubeVideoID").unwrap_or_default()) + .collect() + } + }; - // Build playlist_info structure matching Python exactly - let playlist_info = serde_json::json!({ - "name": playlist_name, - "description": playlist_description, - "episode_count": episode_count, - "icon_name": icon_name - }); + // Delete the MP3 files for each video + for video_id in &video_ids { + let file_paths = vec![ + format!("/opt/pinepods/downloads/youtube/{}.mp3", video_id), + format!("/opt/pinepods/downloads/youtube/{}.mp3.mp3", video_id), // In case of double extension + ]; - Ok(serde_json::json!({ - "playlist_info": playlist_info, - "episodes": episodes - })) + for file_path in file_paths { + if tokio::fs::metadata(&file_path).await.is_ok() { + match tokio::fs::remove_file(&file_path).await { + Ok(_) => println!("Deleted file: {}", file_path), + Err(e) => println!("Failed to delete file {}: {}", file_path, e), + } + } } } - } - // Set user playback speed - matches Python set_playback_speed_user function - pub async fn set_playback_speed_user(&self, user_id: i32, playback_speed: f64) -> AppResult<()> { + // Delete from the related tables in the correct order match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "Users" SET playbackspeed = $1 WHERE userid = $2"#) - 
.bind(playback_speed) + let delete_queries = vec![ + r#"DELETE FROM "PlaylistContents" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "UserEpisodeHistory" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "UserVideoHistory" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "DownloadedEpisodes" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "DownloadedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "SavedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "SavedEpisodes" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "EpisodeQueue" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, + r#"DELETE FROM "YouTubeVideos" WHERE podcastid = $1"#, + r#"DELETE FROM "Podcasts" WHERE podcastid = $1 AND isyoutubechannel = TRUE"#, + ]; + + for query in delete_queries { + sqlx::query(query) + .bind(podcast_id) + .execute(pool) + .await?; + } + + // Update user stats + sqlx::query(r#"UPDATE "UserStats" SET podcastsadded = podcastsadded - 1 WHERE userid = $1"#) .bind(user_id) .execute(pool) .await?; } DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE Users SET PlaybackSpeed = ? 
WHERE UserID = ?") - .bind(playback_speed) + let delete_queries = vec![ + "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", + "DELETE FROM YouTubeVideos WHERE PodcastID = ?", + "DELETE FROM Podcasts WHERE PodcastID = ? 
AND IsYouTubeChannel = TRUE", + ]; + + for query in delete_queries { + sqlx::query(query) + .bind(podcast_id) + .execute(pool) + .await?; + } + + // Update user stats + sqlx::query("UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = ?") .bind(user_id) .execute(pool) .await?; } } + Ok(()) } - // Get all admin user IDs - matches Python add_news_feed_if_not_added logic - pub async fn get_all_admin_user_ids(&self) -> AppResult> { + // Get podcast ID by feed URL and title - for get_podcast_details_dynamic + pub async fn get_podcast_id_by_feed(&self, user_id: i32, feed_url: &str, _podcast_title: &str) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT userid FROM "Users" WHERE isadmin = TRUE"#) - .fetch_all(pool) - .await?; - - let user_ids: Vec = rows.into_iter() - .map(|row| row.try_get("userid")) - .collect::, _>>()?; - - Ok(user_ids) + let row = sqlx::query( + r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"# + ) + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + Ok(row.try_get("podcastid")?) + } else { + Err(AppError::not_found("Podcast not found")) + } } DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT UserID FROM Users WHERE IsAdmin = 1") - .fetch_all(pool) - .await?; - - let user_ids: Vec = rows.into_iter() - .map(|row| row.try_get("UserID")) - .collect::, _>>()?; - - Ok(user_ids) + let row = sqlx::query( + "SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?" + ) + .bind(feed_url) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + Ok(row.try_get("PodcastID")?) 
+ } else { + Err(AppError::not_found("Podcast not found")) + } + } + } + } + + // Get raw podcast details - returns all fields as JSON for get_podcast_details_dynamic + pub async fn get_podcast_details_raw(&self, user_id: i32, podcast_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query( + r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"# + ) + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let mut details = serde_json::Map::new(); + + details.insert("podcastname".to_string(), serde_json::Value::String(row.try_get::("podcastname").unwrap_or_default())); + details.insert("feedurl".to_string(), serde_json::Value::String(row.try_get::("feedurl").unwrap_or_default())); + details.insert("description".to_string(), serde_json::Value::String(row.try_get::("description").unwrap_or_default())); + details.insert("author".to_string(), serde_json::Value::String(row.try_get::("author").unwrap_or_default())); + details.insert("artworkurl".to_string(), serde_json::Value::String(row.try_get::, _>("artworkurl").unwrap_or_default().unwrap_or_default())); + details.insert("explicit".to_string(), serde_json::Value::Bool(row.try_get::("explicit").unwrap_or(false))); + details.insert("episodecount".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("episodecount").unwrap_or(0)))); + let categories_str = row.try_get::("categories").unwrap_or_default(); + let categories_parsed = self.parse_categories_json(&categories_str).unwrap_or_default(); + details.insert("categories".to_string(), serde_json::to_value(categories_parsed).unwrap_or(serde_json::Value::Object(serde_json::Map::new()))); + details.insert("websiteurl".to_string(), serde_json::Value::String(row.try_get::("websiteurl").unwrap_or_default())); + details.insert("podcastindexid".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("podcastindexid").unwrap_or(0)))); 
+ details.insert("isyoutubechannel".to_string(), serde_json::Value::Bool(row.try_get::("isyoutubechannel").unwrap_or(false))); + + Ok(Some(serde_json::Value::Object(details))) + } else { + Ok(None) + } + } + DatabasePool::MySQL(pool) => { + let row = sqlx::query( + "SELECT * FROM Podcasts WHERE PodcastID = ? AND UserID = ?" + ) + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let mut details = serde_json::Map::new(); + + details.insert("podcastname".to_string(), serde_json::Value::String(row.try_get::("PodcastName").unwrap_or_default())); + details.insert("feedurl".to_string(), serde_json::Value::String(row.try_get::("FeedURL").unwrap_or_default())); + details.insert("description".to_string(), serde_json::Value::String(row.try_get::("Description").unwrap_or_default())); + details.insert("author".to_string(), serde_json::Value::String(row.try_get::("Author").unwrap_or_default())); + details.insert("artworkurl".to_string(), serde_json::Value::String(row.try_get::, _>("ArtworkURL").unwrap_or_default().unwrap_or_default())); + details.insert("explicit".to_string(), serde_json::Value::Bool(row.try_get::("Explicit").unwrap_or(false))); + details.insert("episodecount".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("EpisodeCount").unwrap_or(0)))); + let categories_str = row.try_get::("Categories").unwrap_or_default(); + let categories_parsed = self.parse_categories_json(&categories_str).unwrap_or_default(); + details.insert("categories".to_string(), serde_json::to_value(categories_parsed).unwrap_or(serde_json::Value::Object(serde_json::Map::new()))); + details.insert("websiteurl".to_string(), serde_json::Value::String(row.try_get::("WebsiteURL").unwrap_or_default())); + details.insert("podcastindexid".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("PodcastIndexID").unwrap_or(0)))); + details.insert("isyoutubechannel".to_string(), 
serde_json::Value::Bool(row.try_get::("IsYouTubeChannel").unwrap_or(false))); + + Ok(Some(serde_json::Value::Object(details))) + } else { + Ok(None) + } } } } - // Check if user already has a specific podcast feed - matches Python logic - pub async fn user_has_podcast_feed(&self, user_id: i32, feed_url: &str) -> AppResult { + // Get podcast values from feed - for get_podcast_details_dynamic when podcast is not added + pub async fn get_podcast_values_from_feed(&self, feed_url: &str, user_id: i32, _display_only: bool) -> AppResult { + // Use the real get_podcast_values function that exists in the codebase + let podcast_values = self.get_podcast_values(feed_url, user_id, None, None).await?; + + // Convert HashMap to the expected JSON format for get_podcast_details_dynamic + let response = serde_json::json!({ + "pod_title": podcast_values.get("podcastname").unwrap_or(&"Unknown Podcast".to_string()), + "pod_feed_url": feed_url, + "pod_description": podcast_values.get("description").unwrap_or(&"".to_string()), + "pod_author": podcast_values.get("author").unwrap_or(&"Unknown Author".to_string()), + "pod_artwork": podcast_values.get("artworkurl").unwrap_or(&"/static/assets/default-podcast.png".to_string()), + "pod_explicit": podcast_values.get("explicit").unwrap_or(&"False".to_string()) == "True", + "pod_episode_count": podcast_values.get("episodecount").unwrap_or(&"0".to_string()).parse::().unwrap_or(0), + "categories": podcast_values.get("categories").unwrap_or(&"{}".to_string()), + "pod_website": podcast_values.get("websiteurl").unwrap_or(&"".to_string()), + }); + + Ok(response) + } + + // Update feed cutoff days - for update_feed_cutoff_days endpoint + pub async fn update_feed_cutoff_days(&self, podcast_id: i32, user_id: i32, feed_cutoff_days: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE userid = $1 AND feedurl = $2"#) + // First verify podcast exists and belongs to user + let 
existing = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) + .bind(podcast_id) .bind(user_id) - .bind(feed_url) .fetch_optional(pool) .await?; - - Ok(row.is_some()) + + if existing.is_none() { + return Ok(false); + } + + // Update the feed cutoff days + let result = sqlx::query(r#"UPDATE "Podcasts" SET feedcutoffdays = $1 WHERE podcastid = $2 AND userid = $3"#) + .bind(feed_cutoff_days) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; + + Ok(result.rows_affected() > 0) } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT PodcastID FROM Podcasts WHERE UserID = ? AND FeedURL = ?") + // First verify podcast exists and belongs to user + let existing = sqlx::query("SELECT PodcastID FROM Podcasts WHERE PodcastID = ? AND UserID = ?") + .bind(podcast_id) .bind(user_id) - .bind(feed_url) .fetch_optional(pool) .await?; - - Ok(row.is_some()) + + if existing.is_none() { + return Ok(false); + } + + // Update the feed cutoff days + let result = sqlx::query("UPDATE Podcasts SET FeedCutoffDays = ? WHERE PodcastID = ? 
AND UserID = ?") + .bind(feed_cutoff_days) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; + + Ok(result.rows_affected() > 0) } } } - // Add PinePods news feed to admin users - matches Python add_news_feed_if_not_added function - pub async fn add_news_feed_if_not_added(&self) -> AppResult<()> { - let admin_user_ids = self.get_all_admin_user_ids().await?; - let feed_url = "https://news.pinepods.online/feed.xml"; + // Update podcast basic info - for edit podcast functionality + pub async fn update_podcast_info(&self, podcast_id: i32, user_id: i32, feed_url: Option, username: Option, password: Option, podcast_name: Option, description: Option, author: Option, artwork_url: Option, website_url: Option, podcast_index_id: Option) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + // First verify podcast exists and belongs to user + let existing = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + if existing.is_none() { + return Ok(false); + } - for user_id in admin_user_ids { - // Check if this user already has the news feed - if !self.user_has_podcast_feed(user_id, feed_url).await? 
{ - // Add the PinePods news feed using existing functions - matches Python add_custom_podcast - match self.get_podcast_values(feed_url, user_id, None, None).await { - Ok(podcast_values) => { - let feed_cutoff = 30; // Default cutoff like Python - if let Err(e) = self.add_podcast_from_values(&podcast_values, user_id, feed_cutoff).await { - eprintln!("Failed to add PinePods news feed for user {}: {}", user_id, e); - // Continue with other users even if one fails - } - }, - Err(e) => { - eprintln!("Failed to get podcast values for PinePods news feed for user {}: {}", user_id, e); - // Continue with other users even if one fails - } + // Build dynamic update query based on provided fields + let mut update_parts = Vec::new(); + let mut bind_count = 1; + + if feed_url.is_some() { + update_parts.push(format!("feedurl = ${}", bind_count)); + bind_count += 1; + } + if username.is_some() { + update_parts.push(format!("username = ${}", bind_count)); + bind_count += 1; + } + if password.is_some() { + update_parts.push(format!("password = ${}", bind_count)); + bind_count += 1; + } + if podcast_name.is_some() { + update_parts.push(format!("podcastname = ${}", bind_count)); + bind_count += 1; + } + if description.is_some() { + update_parts.push(format!("description = ${}", bind_count)); + bind_count += 1; + } + if author.is_some() { + update_parts.push(format!("author = ${}", bind_count)); + bind_count += 1; + } + if artwork_url.is_some() { + update_parts.push(format!("artworkurl = ${}", bind_count)); + bind_count += 1; + } + if website_url.is_some() { + update_parts.push(format!("websiteurl = ${}", bind_count)); + bind_count += 1; + } + if podcast_index_id.is_some() { + update_parts.push(format!("podcastindexid = ${}", bind_count)); + bind_count += 1; } - } - } - Ok(()) - } + if update_parts.is_empty() { + return Ok(false); + } - // Get YouTube video location - matches Python get_youtube_video_location function exactly - pub async fn get_youtube_video_location( - &self, - 
episode_id: i32, - user_id: i32, - ) -> AppResult> { - println!("Looking up YouTube video location for episode_id: {}, user_id: {}", episode_id, user_id); - - let youtube_id = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT "YouTubeVideos".youtubevideoid - FROM "YouTubeVideos" - INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid - WHERE "YouTubeVideos".videoid = $1 AND "Podcasts".userid = $2 - "#) - .bind(episode_id) - .bind(user_id) - .fetch_optional(pool) - .await?; + let query_str = format!( + r#"UPDATE "Podcasts" SET {} WHERE podcastid = ${} AND userid = ${}"#, + update_parts.join(", "), + bind_count, + bind_count + 1 + ); - if let Some(row) = row { - row.try_get::("youtubevideoid")? - } else { - return Ok(None); + let mut query = sqlx::query(&query_str); + + if let Some(url) = feed_url { + query = query.bind(url); + } + if let Some(uname) = username { + query = query.bind(uname); + } + if let Some(pwd) = password { + query = query.bind(pwd); } + if let Some(name) = podcast_name { + query = query.bind(name); + } + if let Some(desc) = description { + query = query.bind(desc); + } + if let Some(auth) = author { + query = query.bind(auth); + } + if let Some(artwork) = artwork_url { + query = query.bind(artwork); + } + if let Some(website) = website_url { + query = query.bind(website); + } + if let Some(idx_id) = podcast_index_id { + query = query.bind(idx_id); + } + + query = query.bind(podcast_id).bind(user_id); + + let result = query.execute(pool).await?; + Ok(result.rows_affected() > 0) } DatabasePool::MySQL(pool) => { - let row = sqlx::query(r#" - SELECT YouTubeVideos.YouTubeVideoID - FROM YouTubeVideos - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - WHERE YouTubeVideos.VideoID = ? AND Podcasts.UserID = ? - "#) - .bind(episode_id) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - row.try_get::("YouTubeVideoID")? 
- } else { - return Ok(None); + // First verify podcast exists and belongs to user + let existing = sqlx::query("SELECT PodcastID FROM Podcasts WHERE PodcastID = ? AND UserID = ?") + .bind(podcast_id) + .bind(user_id) + .fetch_optional(pool) + .await?; + if existing.is_none() { + return Ok(false); } - } - }; - println!("Found YouTube ID: {}", youtube_id); + // Build dynamic update query based on provided fields + let mut update_parts = Vec::new(); + + if feed_url.is_some() { + update_parts.push("FeedURL = ?"); + } + if username.is_some() { + update_parts.push("Username = ?"); + } + if password.is_some() { + update_parts.push("Password = ?"); + } + if podcast_name.is_some() { + update_parts.push("PodcastName = ?"); + } + if description.is_some() { + update_parts.push("Description = ?"); + } + if author.is_some() { + update_parts.push("Author = ?"); + } + if artwork_url.is_some() { + update_parts.push("ArtworkURL = ?"); + } + if website_url.is_some() { + update_parts.push("WebsiteURL = ?"); + } + if podcast_index_id.is_some() { + update_parts.push("PodcastIndexID = ?"); + } - let file_path = format!("/opt/pinepods/downloads/youtube/{}.mp3", youtube_id); - let file_path_double = format!("/opt/pinepods/downloads/youtube/{}.mp3.mp3", youtube_id); + if update_parts.is_empty() { + return Ok(false); + } - println!("Checking paths: {} and {}", file_path, file_path_double); + let query_str = format!( + "UPDATE Podcasts SET {} WHERE PodcastID = ? 
AND UserID = ?", + update_parts.join(", ") + ); - if tokio::fs::metadata(&file_path).await.is_ok() { - println!("Found file at {}", file_path); - Ok(Some(file_path)) - } else if tokio::fs::metadata(&file_path_double).await.is_ok() { - println!("Found file at {}", file_path_double); - Ok(Some(file_path_double)) - } else { - println!("No file found for YouTube ID: {}", youtube_id); - Ok(None) + let mut query = sqlx::query(&query_str); + + if let Some(url) = feed_url { + query = query.bind(url); + } + if let Some(uname) = username { + query = query.bind(uname); + } + if let Some(pwd) = password { + query = query.bind(pwd); + } + if let Some(name) = podcast_name { + query = query.bind(name); + } + if let Some(desc) = description { + query = query.bind(desc); + } + if let Some(auth) = author { + query = query.bind(auth); + } + if let Some(artwork) = artwork_url { + query = query.bind(artwork); + } + if let Some(website) = website_url { + query = query.bind(website); + } + if let Some(idx_id) = podcast_index_id { + query = query.bind(idx_id); + } + + query = query.bind(podcast_id).bind(user_id); + + let result = query.execute(pool).await?; + Ok(result.rows_affected() > 0) + } } } - // Get download location - matches Python get_download_location function exactly - pub async fn get_download_location( - &self, - episode_id: i32, - user_id: i32, - ) -> AppResult> { - println!("Looking up download location for episode_id: {}, user_id: {}", episode_id, user_id); - + // Bulk episode operations for efficient batch processing + pub async fn bulk_mark_episodes_completed(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { + if episode_ids.is_empty() { + return Ok((0, 0)); + } + + let mut processed = 0; + let mut failed = 0; + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT downloadedlocation FROM "DownloadedEpisodes" WHERE episodeid = $1 AND userid = $2"#) - .bind(episode_id) - .bind(user_id) - .fetch_optional(pool) - 
.await?; + let mut tx = pool.begin().await?; - if let Some(row) = row { - let location: String = row.try_get("downloadedlocation")?; - println!("DownloadedLocation found: {}", location); - Ok(Some(location)) + if is_youtube { + for episode_id in episode_ids { + match self.mark_episode_completed(episode_id, user_id, is_youtube).await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } } else { - println!("No DownloadedLocation found for the given EpisodeID and UserID"); - Ok(None) + // Batch update regular episodes + let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); + let ids_clause = episode_ids_str.join(","); + + let query = format!( + r#"UPDATE "Episodes" SET completed = TRUE WHERE episodeid IN ({})"#, + ids_clause + ); + + let result = sqlx::query(&query).execute(&mut *tx).await?; + processed = result.rows_affected() as i32; } + + tx.commit().await?; } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT DownloadedLocation FROM DownloadedEpisodes WHERE EpisodeID = ? 
AND UserID = ?") - .bind(episode_id) - .bind(user_id) - .fetch_optional(pool) - .await?; + let mut tx = pool.begin().await?; - if let Some(row) = row { - let location: String = row.try_get("DownloadedLocation")?; - println!("DownloadedLocation found: {}", location); - Ok(Some(location)) + if is_youtube { + for episode_id in episode_ids { + match self.mark_episode_completed(episode_id, user_id, is_youtube).await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } } else { - println!("No DownloadedLocation found for the given EpisodeID and UserID"); - Ok(None) + // Batch update regular episodes + let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); + let ids_clause = episode_ids_str.join(","); + + let query = format!( + "UPDATE Episodes SET Completed = TRUE WHERE EpisodeID IN ({})", + ids_clause + ); + + let result = sqlx::query(&query).execute(&mut *tx).await?; + processed = result.rows_affected() as i32; } + + tx.commit().await?; } } - } - // Update YouTube video duration after download - updates duration from MP3 file - pub async fn update_youtube_video_duration(&self, video_id: &str, duration_seconds: i32) -> AppResult<()> { - println!("Updating duration for YouTube video {} to {} seconds", video_id, duration_seconds); - - match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "YouTubeVideos" SET duration = $1 WHERE youtubevideoid = $2"#) - .bind(duration_seconds) - .bind(video_id) - .execute(pool) - .await?; - } - DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE YouTubeVideos SET Duration = ? 
WHERE YouTubeVideoID = ?") - .bind(duration_seconds) - .bind(video_id) - .execute(pool) - .await?; - } - } - - println!("Successfully updated duration for YouTube video {}", video_id); - Ok(()) + Ok((processed, failed)) } -} -#[derive(Debug)] -pub struct PodcastValues { - pub pod_title: String, - pub pod_description: String, - pub pod_artwork: String, - pub pod_feed_url: String, - pub user_id: i32, -} + pub async fn bulk_save_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { + if episode_ids.is_empty() { + return Ok((0, 0)); + } -// Playlist configuration struct - matches Python playlist data structure exactly -#[derive(Debug, Clone)] -pub struct PlaylistConfig { - pub playlist_id: i32, - pub name: String, - pub user_id: i32, - pub podcast_ids: Option>, - pub include_unplayed: bool, - pub include_partially_played: bool, - pub include_played: bool, - pub play_progress_min: Option, - pub play_progress_max: Option, - pub time_filter_hours: Option, - pub min_duration: Option, - pub max_duration: Option, - pub sort_order: String, - pub group_by_podcast: bool, - pub max_episodes: Option, - pub is_system_playlist: bool, -} + let mut processed = 0; + let mut failed = 0; -impl PlaylistConfig { - // Create from PostgreSQL row - matches Python playlist dictionary extraction - pub fn from_postgres_row(row: &sqlx::postgres::PgRow) -> AppResult { - use sqlx::Row; - - // Parse podcast IDs from PostgreSQL array or JSON - let podcast_ids = if let Ok(ids_str) = row.try_get::, _>("podcastids") { - if let Some(ids_str) = ids_str { - if ids_str.is_empty() || ids_str == "[]" || ids_str == "{}" { - None - } else if ids_str.starts_with('{') && ids_str.ends_with('}') { - // PostgreSQL array format: {1,2,3} - let trimmed = ids_str.trim_start_matches('{').trim_end_matches('}'); - if trimmed.is_empty() { - None - } else { - let ids: Result, _> = trimmed - .split(',') - .map(|s| s.trim().parse::()) - .collect(); - Some(ids.unwrap_or_default()) + match 
self { + DatabasePool::Postgres(pool) => { + let mut tx = pool.begin().await?; + + if is_youtube { + for episode_id in episode_ids { + // Check if already saved to avoid duplicates + let existing = sqlx::query( + r#"SELECT "SaveID" FROM "SavedVideos" WHERE "VideoID" = $1 AND "UserID" = $2"# + ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + r#"INSERT INTO "SavedVideos" ("VideoID", "UserID") VALUES ($1, $2)"# + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } } - } else if ids_str.starts_with('[') && ids_str.ends_with(']') { - // JSON array format: [1,2,3] - serde_json::from_str(&ids_str).unwrap_or(None) - } else { - None - } - } else { - None - } - } else { - None - }; - - Ok(PlaylistConfig { - playlist_id: row.try_get("playlistid")?, - name: row.try_get("name")?, - user_id: row.try_get("userid")?, - podcast_ids, - include_unplayed: row.try_get("includeunplayed").unwrap_or(true), - include_partially_played: row.try_get("includepartiallyplayed").unwrap_or(true), - include_played: row.try_get("includeplayed").unwrap_or(false), - play_progress_min: row.try_get("playprogressmin").ok(), - play_progress_max: row.try_get("playprogressmax").ok(), - time_filter_hours: row.try_get("timefilterhours").ok(), - min_duration: row.try_get("minduration").ok(), - max_duration: row.try_get("maxduration").ok(), - sort_order: row.try_get("sortorder").unwrap_or_else(|_| "date_desc".to_string()), - group_by_podcast: row.try_get("groupbypodcast").unwrap_or(false), - max_episodes: row.try_get("maxepisodes").ok(), - is_system_playlist: row.try_get("issystemplaylist").unwrap_or(false), - }) - } - - // Create from MySQL row - matches Python playlist dictionary extraction - pub fn from_mysql_row(row: &sqlx::mysql::MySqlRow) -> AppResult { - use sqlx::Row; - - // Parse podcast IDs from MySQL JSON - let podcast_ids = if let Ok(ids_str) 
= row.try_get::, _>("PodcastIds") { - if let Some(ids_str) = ids_str { - if ids_str.is_empty() || ids_str == "null" { - None } else { - serde_json::from_str(&ids_str).unwrap_or(None) + for episode_id in episode_ids { + // Check if already saved to avoid duplicates + let existing = sqlx::query( + r#"SELECT saveid FROM "SavedEpisodes" WHERE episodeid = $1 AND userid = $2"# + ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + r#"INSERT INTO "SavedEpisodes" (episodeid, userid) VALUES ($1, $2)"# + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } } - } else { - None + + tx.commit().await?; + } + DatabasePool::MySQL(pool) => { + let mut tx = pool.begin().await?; + + if is_youtube { + for episode_id in episode_ids { + let existing = sqlx::query( + "SELECT SaveID FROM SavedVideos WHERE VideoID = ? AND UserID = ?" + ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + "INSERT INTO SavedVideos (VideoID, UserID) VALUES (?, ?)" + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } + } else { + for episode_id in episode_ids { + let existing = sqlx::query( + "SELECT SaveID FROM SavedEpisodes WHERE EpisodeID = ? AND UserID = ?" 
+ ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + "INSERT INTO SavedEpisodes (EpisodeID, UserID) VALUES (?, ?)" + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } + } + + tx.commit().await?; } - } else { - None - }; - - Ok(PlaylistConfig { - playlist_id: row.try_get("PlaylistID")?, - name: row.try_get("PlaylistName")?, - user_id: row.try_get("UserID")?, - podcast_ids, - include_unplayed: row.try_get("IncludeUnplayed").unwrap_or(true), - include_partially_played: row.try_get("IncludePartiallyPlayed").unwrap_or(true), - include_played: row.try_get("IncludePlayed").unwrap_or(false), - play_progress_min: row.try_get("PlayProgressMin").ok(), - play_progress_max: row.try_get("PlayProgressMax").ok(), - time_filter_hours: row.try_get("TimeFilterHours").ok(), - min_duration: row.try_get("MinDuration").ok(), - max_duration: row.try_get("MaxDuration").ok(), - sort_order: row.try_get("SortOrder").unwrap_or_else(|_| "date_desc".to_string()), - group_by_podcast: row.try_get("GroupByPodcast").unwrap_or(false), - max_episodes: row.try_get("MaxEpisodes").ok(), - is_system_playlist: row.try_get("IsSystemPlaylist").unwrap_or(false), - }) - } - - // Get PostgreSQL sort order - matches Python sort_mapping exactly (for inner query) - pub fn get_postgres_sort_order(&self) -> String { - match self.sort_order.as_str() { - "date_asc" => "ORDER BY e.episodepubdate ASC".to_string(), - "date_desc" => "ORDER BY e.episodepubdate DESC".to_string(), - "duration_asc" => "ORDER BY e.episodeduration ASC".to_string(), - "duration_desc" => "ORDER BY e.episodeduration DESC".to_string(), - "listen_progress" => "ORDER BY (COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC".to_string(), - "completion" => "ORDER BY COALESCE(h.listenduration::float / NULLIF(e.episodeduration, 0), 0) DESC".to_string(), - _ => "ORDER BY 
e.episodepubdate DESC".to_string(), - } - } - - // Get PostgreSQL sort order for outer query (ROW_NUMBER OVER) - fixed alias scoping - pub fn get_postgres_outer_sort_order(&self) -> String { - match self.sort_order.as_str() { - "date_asc" => "ORDER BY episodes.episodepubdate ASC".to_string(), - "date_desc" => "ORDER BY episodes.episodepubdate DESC".to_string(), - "duration_asc" => "ORDER BY episodes.episodeduration ASC".to_string(), - "duration_desc" => "ORDER BY episodes.episodeduration DESC".to_string(), - "listen_progress" => "ORDER BY (episodes.listenduration::float / NULLIF(episodes.episodeduration, 0)) DESC".to_string(), - "completion" => "ORDER BY (episodes.listenduration::float / NULLIF(episodes.episodeduration, 0)) DESC".to_string(), - _ => "ORDER BY episodes.episodepubdate DESC".to_string(), - } - } - - // Get MySQL sort order - matches Python sort_mapping exactly (for inner query) - pub fn get_mysql_sort_order(&self) -> String { - match self.sort_order.as_str() { - "date_asc" => "ORDER BY e.EpisodePubDate ASC".to_string(), - "date_desc" => "ORDER BY e.EpisodePubDate DESC".to_string(), - "duration_asc" => "ORDER BY e.EpisodeDuration ASC".to_string(), - "duration_desc" => "ORDER BY e.EpisodeDuration DESC".to_string(), - "listen_progress" => "ORDER BY (COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) DESC".to_string(), - "completion" => "ORDER BY COALESCE(h.ListenDuration / NULLIF(e.EpisodeDuration, 0), 0) DESC".to_string(), - _ => "ORDER BY e.EpisodePubDate DESC".to_string(), - } - } - - // Get MySQL sort order for outer query (ROW_NUMBER OVER) - fixed alias scoping - pub fn get_mysql_outer_sort_order(&self) -> String { - match self.sort_order.as_str() { - "date_asc" => "ORDER BY episodes.EpisodePubDate ASC".to_string(), - "date_desc" => "ORDER BY episodes.EpisodePubDate DESC".to_string(), - "duration_asc" => "ORDER BY episodes.EpisodeDuration ASC".to_string(), - "duration_desc" => "ORDER BY episodes.EpisodeDuration DESC".to_string(), - 
"listen_progress" => "ORDER BY (episodes.ListenDuration / NULLIF(episodes.EpisodeDuration, 0)) DESC".to_string(), - "completion" => "ORDER BY (episodes.ListenDuration / NULLIF(episodes.EpisodeDuration, 0)) DESC".to_string(), - _ => "ORDER BY episodes.EpisodePubDate DESC".to_string(), } + + Ok((processed, failed)) } - - // Check if this is an "Almost Done" playlist - matches Python logic - pub fn is_almost_done(&self) -> bool { - self.name == "Almost Done" || - (self.include_partially_played && - !self.include_unplayed && - !self.include_played && - self.play_progress_min.map_or(false, |min| min >= 75.0)) - } - - // Check if this is a "Currently Listening" playlist - matches Python logic - pub fn is_currently_listening(&self) -> bool { - self.name == "Currently Listening" || - (self.include_partially_played && - !self.include_unplayed && - !self.include_played && - self.play_progress_min.is_none() && - self.play_progress_max.is_none()) - } - - // Check if this is a "Fresh Releases" playlist - matches Python logic - pub fn is_fresh_releases(&self) -> bool { - self.name == "Fresh Releases" && self.is_system_playlist - } - - // Get effective time filter hours - Fresh Releases defaults to 24 if not set - pub fn get_effective_time_filter_hours(&self) -> Option { - if self.is_fresh_releases() && self.time_filter_hours.is_none() { - Some(24) // Default 24 hours for Fresh Releases - } else { - self.time_filter_hours + + pub async fn bulk_queue_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { + if episode_ids.is_empty() { + return Ok((0, 0)); } - } -} -impl DatabasePool { - // RSS key validation - matches Python get_rss_key_if_valid function - pub async fn get_rss_key_if_valid(&self, api_key: &str, podcast_ids: Option<&Vec>) -> AppResult> { - use crate::handlers::feed::RssKeyInfo; - - let filter_podcast_ids = podcast_ids.map_or(false, |ids| !ids.is_empty() && !ids.contains(&-1)); - + let mut processed = 0; + let mut failed = 0; + 
match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT fk.userid, STRING_AGG(CAST(fkm.podcastid AS TEXT), ',') as podcastids - FROM "RssKeys" fk - LEFT JOIN "RssKeyMap" fkm ON fk.rsskeyid = fkm.rsskeyid - WHERE fk.rsskey = $1 - GROUP BY fk.userid - "#) - .bind(api_key) - .fetch_optional(pool) + let mut tx = pool.begin().await?; + + if is_youtube { + for episode_id in episode_ids { + // Check if already queued to avoid duplicates + let existing = sqlx::query( + r#"SELECT "QueueID" FROM "QueuedVideos" WHERE "VideoID" = $1 AND "UserID" = $2"# + ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + r#"INSERT INTO "QueuedVideos" ("VideoID", "UserID") VALUES ($1, $2)"# + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } + } else { + // Get max queue position for user + let max_pos_row = sqlx::query( + r#"SELECT COALESCE(MAX(queueposition), 0) as max_pos FROM "EpisodeQueue" WHERE userid = $1"# + ) + .bind(user_id) + .fetch_one(&mut *tx) .await?; + + let mut max_pos: i32 = max_pos_row.try_get("max_pos")?; + + for episode_id in episode_ids { + // Check if already queued to avoid duplicates + let existing = sqlx::query( + r#"SELECT queueid FROM "EpisodeQueue" WHERE episodeid = $1 AND userid = $2 AND is_youtube = $3"# + ) + .bind(episode_id) + .bind(user_id) + .bind(is_youtube) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + max_pos += 1; + match sqlx::query( + r#"INSERT INTO "EpisodeQueue" (episodeid, userid, queueposition, is_youtube) VALUES ($1, $2, $3, $4)"# + ) + .bind(episode_id) + .bind(user_id) + .bind(max_pos) + .bind(is_youtube) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } + } - if let Some(row) = row { - let user_id: i32 = row.try_get("userid")?; - let podcast_ids_str: Option = row.try_get("podcastids").ok(); 
+ tx.commit().await?; + } + DatabasePool::MySQL(pool) => { + let mut tx = pool.begin().await?; + + if is_youtube { + for episode_id in episode_ids { + let existing = sqlx::query( + "SELECT QueueID FROM QueuedVideos WHERE VideoID = ? AND UserID = ?" + ) + .bind(episode_id) + .bind(user_id) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + match sqlx::query( + "INSERT INTO QueuedVideos (VideoID, UserID) VALUES (?, ?)" + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, + } + } + } + } else { + // Get max queue position for user + let max_pos_row = sqlx::query( + "SELECT COALESCE(MAX(QueuePosition), 0) as max_pos FROM EpisodeQueue WHERE UserID = ?" + ) + .bind(user_id) + .fetch_one(&mut *tx) + .await?; - let key_podcast_ids = if let Some(ids_str) = podcast_ids_str { - ids_str.split(',') - .filter_map(|s| s.parse::().ok()) - .collect::>() - } else { - vec![-1] // Universal access if no specific podcasts - }; + let mut max_pos: i32 = max_pos_row.try_get("max_pos")?; - // Check if access is allowed - if filter_podcast_ids { - if let Some(requested_ids) = podcast_ids { - let has_universal = key_podcast_ids.contains(&-1); - let has_specific_access = requested_ids.iter() - .all(|id| key_podcast_ids.contains(id)); - - if !has_universal && !has_specific_access { - return Ok(None); + for episode_id in episode_ids { + let existing = sqlx::query( + "SELECT QueueID FROM EpisodeQueue WHERE EpisodeID = ? AND UserID = ? AND is_youtube = ?" 
+ ) + .bind(episode_id) + .bind(user_id) + .bind(is_youtube) + .fetch_optional(&mut *tx) + .await?; + + if existing.is_none() { + max_pos += 1; + match sqlx::query( + "INSERT INTO EpisodeQueue (EpisodeID, UserID, QueuePosition, is_youtube) VALUES (?, ?, ?, ?)" + ) + .bind(episode_id) + .bind(user_id) + .bind(max_pos) + .bind(is_youtube) + .execute(&mut *tx) + .await { + Ok(_) => processed += 1, + Err(_) => failed += 1, } } } - - Ok(Some(RssKeyInfo { - podcast_ids: key_podcast_ids, - user_id, - key: api_key.to_string(), - })) + } + + tx.commit().await?; + } + } + + Ok((processed, failed)) + } + + // Bulk delete downloaded episodes - efficient batch processing for mass deletion + pub async fn bulk_delete_downloaded_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { + if episode_ids.is_empty() { + return Ok((0, 0)); + } + + let mut processed = 0; + let mut failed = 0; + + match self { + DatabasePool::Postgres(pool) => { + let mut tx = pool.begin().await?; + + if is_youtube { + // Delete YouTube videos from DownloadedEpisodes (they use the same table but different logic) + for episode_id in episode_ids { + match sqlx::query( + r#"DELETE FROM "DownloadedEpisodes" WHERE episodeid = $1 AND userid = $2"# + ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(result) => { + if result.rows_affected() > 0 { + processed += 1; + } else { + failed += 1; // Episode wasn't downloaded by this user + } + }, + Err(_) => failed += 1, + } + } } else { - Ok(None) + // Batch delete regular episodes using IN clause for efficiency + let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); + let ids_clause = episode_ids_str.join(","); + + let query = format!( + r#"DELETE FROM "DownloadedEpisodes" WHERE episodeid IN ({}) AND userid = $1"#, + ids_clause + ); + + let result = sqlx::query(&query) + .bind(user_id) + .execute(&mut *tx) + .await?; + processed = result.rows_affected() as i32; + failed = 
episode_ids.len() as i32 - processed; // Assume failures are episodes not found } + + tx.commit().await?; } DatabasePool::MySQL(pool) => { - let row = sqlx::query(r#" - SELECT fk.UserID, GROUP_CONCAT(fkm.PodcastID) as podcastids - FROM RssKeys fk - LEFT JOIN RssKeyMap fkm ON fk.RssKeyID = fkm.RssKeyID - WHERE fk.RssKey = ? - GROUP BY fk.UserID - "#) - .bind(api_key) - .fetch_optional(pool) - .await?; + let mut tx = pool.begin().await?; - if let Some(row) = row { - let user_id: i32 = row.try_get("UserID")?; - let podcast_ids_str: Option = row.try_get("podcastids").ok(); - - let key_podcast_ids = if let Some(ids_str) = podcast_ids_str { - ids_str.split(',') - .filter_map(|s| s.parse::().ok()) - .collect::>() - } else { - vec![-1] // Universal access if no specific podcasts - }; - - // Check if access is allowed - if filter_podcast_ids { - if let Some(requested_ids) = podcast_ids { - let has_universal = key_podcast_ids.contains(&-1); - let has_specific_access = requested_ids.iter() - .all(|id| key_podcast_ids.contains(id)); - - if !has_universal && !has_specific_access { - return Ok(None); - } + if is_youtube { + // Delete YouTube videos from DownloadedEpisodes + for episode_id in episode_ids { + match sqlx::query( + "DELETE FROM DownloadedEpisodes WHERE EpisodeID = ? AND UserID = ?" 
+ ) + .bind(episode_id) + .bind(user_id) + .execute(&mut *tx) + .await { + Ok(result) => { + if result.rows_affected() > 0 { + processed += 1; + } else { + failed += 1; // Episode wasn't downloaded by this user + } + }, + Err(_) => failed += 1, } } - - Ok(Some(RssKeyInfo { - podcast_ids: key_podcast_ids, - user_id, - key: api_key.to_string(), - })) } else { - Ok(None) + // Batch delete regular episodes using IN clause for efficiency + let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); + let ids_clause = episode_ids_str.join(","); + + let query = format!( + "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN ({}) AND UserID = ?", + ids_clause + ); + + let result = sqlx::query(&query) + .bind(user_id) + .execute(&mut *tx) + .await?; + processed = result.rows_affected() as i32; + failed = episode_ids.len() as i32 - processed; // Assume failures are episodes not found } + + tx.commit().await?; } } + + Ok((processed, failed)) } - // Get RSS feed status - matches Python get_rss_feed_status function - pub async fn get_rss_feed_status(&self, user_id: i32) -> AppResult { + // Set up internal gpodder sync - matches Python set_gpodder_internal_sync function exactly + pub async fn set_gpodder_internal_sync(&self, user_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT enablerssfeeds FROM "Users" WHERE userid = $1"#) + // Get the username and current sync type + let user_row = sqlx::query(r#"SELECT username, pod_sync_type FROM "Users" WHERE userid = $1"#) .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - let enabled: Option = row.try_get("enablerssfeeds").ok(); - Ok(enabled.unwrap_or(false)) + let (username, current_sync_type) = if let Some(row) = user_row { + let username: String = row.try_get("username")?; + let sync_type: Option = row.try_get("pod_sync_type")?; + (username, sync_type.unwrap_or_else(|| "None".to_string())) } else { - Ok(false) + return 
Err(AppError::not_found("User not found")); + }; + + // Generate new sync type based on current + let new_sync_type = match current_sync_type.as_str() { + "external" => "both", + "None" | "" => "gpodder", + _ => ¤t_sync_type, + }; + + // Generate a secure internal token (64 characters alphanumeric) + use rand::{distr::Alphanumeric, Rng}; + let internal_token: String = rand::rng() + .sample_iter(&Alphanumeric) + .take(64) + .map(char::from) + .collect(); + + let local_gpodder_url = "http://localhost:8042"; + + // Create default device name + let default_device_name = format!("pinepods-internal-{}", user_id); + + // Update user with internal gpodder settings and set default device + sqlx::query(r#" + UPDATE "Users" + SET gpodderurl = $1, gpoddertoken = $2, gpodderloginname = $3, pod_sync_type = $4, defaultgpodderdevice = $6 + WHERE userid = $5 + "#) + .bind(local_gpodder_url) + .bind(&internal_token) + .bind(&username) + .bind(new_sync_type) + .bind(user_id) + .bind(&default_device_name) + .execute(pool) + .await?; + + // Create device via gPodder API (matches Python version exactly) + let device_result = match self.create_device_via_gpodder_api(local_gpodder_url, &username, &internal_token, &default_device_name).await { + Ok(device_id) => { + serde_json::json!({ + "device_name": default_device_name, + "device_id": device_id, + "success": true + }) + } + Err(e) => { + tracing::warn!("Failed to create device via API: {}, continuing anyway", e); + // Even if device creation fails, still return success (matches Python behavior) + serde_json::json!({ + "device_name": default_device_name, + "device_id": user_id, + "success": true + }) + } + }; + + // Perform initial full sync to get ALL user subscriptions from all devices + if let Err(e) = self.call_gpodder_initial_full_sync(user_id, local_gpodder_url, &username, &internal_token, &default_device_name).await { + tracing::warn!("Initial GPodder full sync failed during setup: {}", e); + // Don't fail setup if initial sync 
fails } + + Ok(device_result) } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EnableRSSFeeds FROM Users WHERE UserID = ?") + // Get the username and current sync type + let user_row = sqlx::query("SELECT Username, Pod_Sync_Type FROM Users WHERE UserID = ?") .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - let enabled: Option = row.try_get("EnableRSSFeeds").ok(); - Ok(enabled.unwrap_or(false)) + let (username, current_sync_type) = if let Some(row) = user_row { + let username: String = row.try_get("Username")?; + let sync_type: Option = row.try_get("Pod_Sync_Type")?; + (username, sync_type.unwrap_or_else(|| "None".to_string())) } else { - Ok(false) + return Err(AppError::not_found("User not found")); + }; + + // Generate new sync type based on current + let new_sync_type = match current_sync_type.as_str() { + "external" => "both", + "None" | "" => "gpodder", + _ => ¤t_sync_type, + }; + + // Generate a secure internal token (64 characters alphanumeric) + use rand::{distr::Alphanumeric, Rng}; + let internal_token: String = rand::rng() + .sample_iter(&Alphanumeric) + .take(64) + .map(char::from) + .collect(); + + let local_gpodder_url = "http://localhost:8042"; + + // Create default device name + let default_device_name = format!("pinepods-internal-{}", user_id); + + // Update user with internal gpodder settings and set default device + sqlx::query(" + UPDATE Users + SET GpodderUrl = ?, GpodderToken = ?, GpodderLoginName = ?, Pod_Sync_Type = ?, DefaultGpodderDevice = ? + WHERE UserID = ? 
+ ") + .bind(local_gpodder_url) + .bind(&internal_token) + .bind(&username) + .bind(new_sync_type) + .bind(&default_device_name) + .bind(user_id) + .execute(pool) + .await?; + + // Create device via gPodder API (matches Python version exactly) + let device_result = match self.create_device_via_gpodder_api(local_gpodder_url, &username, &internal_token, &default_device_name).await { + Ok(device_id) => { + serde_json::json!({ + "device_name": default_device_name, + "device_id": device_id, + "success": true + }) + } + Err(e) => { + tracing::warn!("Failed to create device via API: {}, continuing anyway", e); + // Even if device creation fails, still return success (matches Python behavior) + serde_json::json!({ + "device_name": default_device_name, + "device_id": user_id, + "success": true + }) + } + }; + + // Perform initial full sync to get ALL user subscriptions from all devices + if let Err(e) = self.call_gpodder_initial_full_sync(user_id, local_gpodder_url, &username, &internal_token, &default_device_name).await { + tracing::warn!("Initial GPodder full sync failed during setup: {}", e); + // Don't fail setup if initial sync fails } + + Ok(device_result) } } } - // Generate podcast RSS feed - matches Python generate_podcast_rss function - pub async fn generate_podcast_rss( - &self, - rss_key: crate::handlers::feed::RssKeyInfo, - limit: i32, - source_type: Option<&str>, - domain: &str, - podcast_ids: Option<&Vec>, - ) -> AppResult { - use chrono::{DateTime, Utc}; - - let user_id = rss_key.user_id; - let mut effective_podcast_ids = rss_key.podcast_ids.clone(); - - // If podcast_id parameter is provided, use it; otherwise use RSS key podcast_ids - let explicit_podcast_filter = podcast_ids.is_some(); - if let Some(ids) = podcast_ids { - if !ids.is_empty() { - effective_podcast_ids = ids.clone(); - } - } - - let podcast_filter = explicit_podcast_filter || (!effective_podcast_ids.is_empty() && !effective_podcast_ids.contains(&-1)); - - // Check if RSS feeds are enabled 
for user - if !self.get_rss_feed_status(user_id).await? { - return Err(AppError::forbidden("RSS feeds not enabled for this user")); - } - - // Get user info for feed metadata - let username = match self { + // Disable internal gpodder sync - matches Python disable_gpodder_internal_sync function exactly + pub async fn disable_gpodder_internal_sync(&self, user_id: i32) -> AppResult { + // Get current user gpodder status + let user_status = self.gpodder_get_status(user_id).await?; + let current_sync_type = &user_status.sync_type; + + // Determine new sync type + let new_sync_type = match current_sync_type.as_str() { + "both" => "external", + "gpodder" => "None", + _ => current_sync_type, + }; + + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT username FROM "Users" WHERE userid = $1"#) + // If internal API is being used, clear the settings + if user_status.gpodder_url.as_deref() == Some("http://localhost:8042") { + sqlx::query(r#" + UPDATE "Users" + SET gpodderurl = '', gpoddertoken = '', gpodderloginname = '', pod_sync_type = $1 + WHERE userid = $2 + "#) + .bind(new_sync_type) .bind(user_id) - .fetch_optional(pool) + .execute(pool) .await?; - - if let Some(row) = row { - row.try_get::("username").unwrap_or_else(|_| "Unknown User".to_string()) } else { - return Err(AppError::not_found("User not found")); + // Just update the sync type + sqlx::query(r#"UPDATE "Users" SET pod_sync_type = $1 WHERE userid = $2"#) + .bind(new_sync_type) + .bind(user_id) + .execute(pool) + .await?; } } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT Username FROM Users WHERE UserID = ?") + // If internal API is being used, clear the settings + if user_status.gpodder_url.as_deref() == Some("http://localhost:8042") { + sqlx::query(" + UPDATE Users + SET GpodderUrl = '', GpodderToken = '', GpodderLoginName = '', Pod_Sync_Type = ? + WHERE UserID = ? 
+ ") + .bind(new_sync_type) .bind(user_id) - .fetch_optional(pool) + .execute(pool) .await?; - - if let Some(row) = row { - row.try_get::("Username").unwrap_or_else(|_| "Unknown User".to_string()) } else { - return Err(AppError::not_found("User not found")); + // Just update the sync type + sqlx::query("UPDATE Users SET Pod_Sync_Type = ? WHERE UserID = ?") + .bind(new_sync_type) + .bind(user_id) + .execute(pool) + .await?; } } + } + + Ok(true) + } + + // Helper function to create device via gPodder API - matches Python create device logic exactly + pub async fn create_device_via_gpodder_api(&self, gpodder_url: &str, username: &str, token: &str, device_name: &str) -> AppResult { + use reqwest; + use serde_json; + + // Use correct authentication based on internal vs external + let (client, auth_method) = if gpodder_url == "http://localhost:8042" { + // Internal GPodder API - use X-GPodder-Token header + let client = reqwest::Client::new(); + (client, "internal") + } else { + // External GPodder API - use session auth with basic fallback + let decrypted_password = self.decrypt_password(token).await?; + let session = self.create_gpodder_session_with_password(gpodder_url, username, &decrypted_password).await?; + (session.client, "external") }; - // Get podcast details for feed metadata - exact Python logic - let (podcast_name, feed_image, feed_description) = if podcast_filter { - match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT podcastname, artworkurl, description FROM "Podcasts" WHERE podcastid = ANY($1)"#) - .bind(&effective_podcast_ids) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - ( - row.try_get::("podcastname").unwrap_or_else(|_| "Unknown Podcast".to_string()), - row.try_get::("artworkurl").unwrap_or_else(|_| "/var/www/html/static/assets/favicon.png".to_string()), - row.try_get::("description").unwrap_or_else(|_| "No description available".to_string()), - ) - } else { - ("Unknown Podcast".to_string(), 
"/var/www/html/static/assets/favicon.png".to_string(), "No description available".to_string()) - } - } - DatabasePool::MySQL(pool) => { - if effective_podcast_ids.len() == 1 { - let row = sqlx::query("SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID = ?") - .bind(effective_podcast_ids[0]) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - ( - row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), - row.try_get::("ArtworkURL").unwrap_or_else(|_| "/var/www/html/static/assets/favicon.png".to_string()), - row.try_get::("Description").unwrap_or_else(|_| "No description available".to_string()), - ) - } else { - ("Unknown Podcast".to_string(), "/var/www/html/static/assets/favicon.png".to_string(), "No description available".to_string()) - } - } else { - let placeholders = vec!["?"; effective_podcast_ids.len()].join(","); - let query_str = format!("SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID IN ({})", placeholders); - let mut query = sqlx::query(&query_str); - for &id in &effective_podcast_ids { - query = query.bind(id); - } - let row = query.fetch_optional(pool).await?; - - if let Some(row) = row { - ( - row.try_get::("PodcastName").unwrap_or_else(|_| "Unknown Podcast".to_string()), - row.try_get::("ArtworkURL").unwrap_or_else(|_| "/var/www/html/static/assets/favicon.png".to_string()), - row.try_get::("Description").unwrap_or_else(|_| "No description available".to_string()), - ) - } else { - ("Unknown Podcast".to_string(), "/var/www/html/static/assets/favicon.png".to_string(), "No description available".to_string()) + // First, check if device already exists + let device_list_url = format!("{}/api/2/devices/{}.json", gpodder_url.trim_end_matches('/'), username); + + let response = if auth_method == "internal" { + client.get(&device_list_url) + .header("X-GPodder-Token", token) + .send() + .await + } else { + // External - session auth with basic fallback handled by session 
client + let decrypted_password = self.decrypt_password(token).await?; + client.get(&device_list_url) + .basic_auth(username, Some(&decrypted_password)) + .send() + .await + }; + + match response { + Ok(response) if response.status().is_success() => { + if let Ok(devices) = response.json::>().await { + for device in devices { + if device.get("id").and_then(|v| v.as_str()) == Some(device_name) { + tracing::info!("Found existing device with ID: {}", device_name); + return Ok(device_name.to_string()); } } } } - } else { - ("All Podcasts".to_string(), "/var/www/html/static/assets/favicon.png".to_string(), "RSS feed for all podcasts from Pinepods".to_string()) - }; - - // Build RSS feed content with proper iTunes namespace - exact Python format - let mut rss_content = String::new(); - rss_content.push_str("\n"); - rss_content.push_str("\n"); - rss_content.push_str("\n"); - rss_content.push_str(&format!(" Pinepods - {}\n", podcast_name)); - rss_content.push_str(" https://github.com/madeofpendletonwool/pinepods\n"); - rss_content.push_str(&format!(" {}\n", feed_description)); - rss_content.push_str(" en\n"); - rss_content.push_str(&format!(" {}\n", username)); - rss_content.push_str(&format!(" \n", feed_image)); - rss_content.push_str(" \n"); - rss_content.push_str(&format!(" {}\n", feed_image)); - rss_content.push_str(&format!(" {}\n", podcast_name)); - rss_content.push_str(" https://github.com/madeofpendletonwool/pinepods\n"); - rss_content.push_str(" \n"); - rss_content.push_str(" 60\n"); + Ok(response) => { + tracing::warn!("Failed to fetch device list: {}", response.status()); + } + Err(e) => { + tracing::warn!("Error fetching device list: {}", e); + } + } - // Get or create RSS key for this user to use in stream URLs - let user_rss_key = self.get_or_create_user_rss_key(user_id).await?; + // Device doesn't exist, create it + let device_url = format!("{}/api/2/devices/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); + let device_data = 
serde_json::json!({ + "caption": format!("PinePods Internal Device {}", device_name.split('-').last().unwrap_or("unknown")), + "type": "server" + }); - // Get episodes (use the user's RSS key for stream URLs, not the requesting key) - let episodes = self.get_rss_episodes(user_id, limit, source_type, &effective_podcast_ids, podcast_filter, domain, &user_rss_key).await?; + let create_response = if auth_method == "internal" { + client.post(&device_url) + .header("X-GPodder-Token", token) + .json(&device_data) + .send() + .await + } else { + let decrypted_password = self.decrypt_password(token).await?; + client.post(&device_url) + .basic_auth(username, Some(&decrypted_password)) + .json(&device_data) + .send() + .await + }; - for episode in episodes { - rss_content.push_str(" \n"); - rss_content.push_str(&format!(" <![CDATA[{}]]>\n", episode.title)); - rss_content.push_str(&format!(" {}\n", episode.url)); - rss_content.push_str(&format!(" \n", episode.description)); - rss_content.push_str(&format!(" {}\n", episode.url)); - rss_content.push_str(&format!(" {}\n", episode.pub_date)); - if let Some(ref author) = episode.author { - rss_content.push_str(&format!(" {}\n", author)); + match create_response + { + Ok(response) if response.status().is_success() => { + tracing::info!("Created device with ID: {}", device_name); + Ok(device_name.to_string()) } - if let Some(ref artwork_url) = episode.artwork_url { - rss_content.push_str(&format!(" \n", artwork_url)); + Ok(response) => { + let status = response.status(); + let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string()); + Err(AppError::internal(&format!("Failed to create device: {} - {}", status, error_text))) + } + Err(e) => { + Err(AppError::internal(&format!("Error creating device via API: {}", e))) } - rss_content.push_str(&format!(" \n", - episode.url, episode.duration.unwrap_or(0))); - rss_content.push_str(" \n"); } - - rss_content.push_str("\n"); - rss_content.push_str("\n"); - - 
Ok(rss_content) } - - // Get user RSS key - matches Python get_user_rss_key function - pub async fn get_user_rss_key(&self, user_id: i32) -> AppResult> { - match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT rsskey FROM "RssKeys" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - Ok(row.try_get("rsskey").ok()) - } else { - Ok(None) - } + + // Background task for GPodder subscription refresh - matches Python refresh_gpodder_subscription_for_background + pub async fn refresh_gpodder_subscription_background(&self, user_id: i32) -> AppResult { + // Get user sync settings + let settings_opt = self.get_user_sync_settings(user_id).await?; + let settings = match settings_opt { + Some(s) => s, + None => return Ok(false), // No sync configured + }; + + // Get default device + let device_name = match self.get_default_gpodder_device_name(user_id).await? { + Some(name) => name, + None => format!("pinepods-internal-{}", user_id), // Fallback device name + }; + + // Call the appropriate sync method based on sync type + match settings.sync_type.as_str() { + "gpodder" => { + // Internal GPodder API - token is already unencrypted for internal use + self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await + } + "external" => { + // External GPodder server - decrypt token using existing encryption system + let decrypted_token = match self.decrypt_gpodder_token(&settings.token).await { + Ok(token) => token, + Err(_) => settings.token.clone(), // Fallback to original token if decryption fails + }; + self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await + } + "both" => { + // Both internal and external + let internal_result = self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await?; + let 
decrypted_token = match self.decrypt_gpodder_token(&settings.token).await { + Ok(token) => token, + Err(_) => settings.token.clone(), + }; + let external_result = self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await?; + Ok(internal_result || external_result) } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT RssKey FROM RssKeys WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - Ok(row.try_get("RssKey").ok()) - } else { - Ok(None) - } + "nextcloud" => { + // Nextcloud sync - use existing nextcloud refresh functionality + self.refresh_nextcloud_subscription_background(user_id).await } + _ => Ok(false), // No sync or unsupported type } } - - // Get or create RSS key for user - ensures user always has an RSS key for stream URLs - pub async fn get_or_create_user_rss_key(&self, user_id: i32) -> AppResult { + + // Helper to get default device name + async fn get_default_gpodder_device_name(&self, user_id: i32) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - // Try to get existing RSS key - let existing_key = sqlx::query(r#"SELECT rsskey FROM "RssKeys" WHERE userid = $1 LIMIT 1"#) + let row = sqlx::query(r#"SELECT devicename FROM "GpodderDevices" WHERE userid = $1 AND isdefault = true LIMIT 1"#) .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = existing_key { - let key: String = row.try_get("rsskey")?; - Ok(key) - } else { - // Create new RSS key - let new_key = uuid::Uuid::new_v4().to_string(); - sqlx::query(r#"INSERT INTO "RssKeys" (userid, rsskey) VALUES ($1, $2)"#) - .bind(user_id) - .bind(&new_key) - .execute(pool) - .await?; - Ok(new_key) - } + Ok(row.and_then(|r| r.try_get("devicename").ok())) } DatabasePool::MySQL(pool) => { - // Try to get existing RSS key - let existing_key = sqlx::query("SELECT RssKey FROM RssKeys WHERE UserID = ? 
LIMIT 1") + let row = sqlx::query("SELECT DeviceName FROM GpodderDevices WHERE UserID = ? AND IsDefault = 1 LIMIT 1") .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = existing_key { - let key: String = row.try_get("RssKey")?; - Ok(key) - } else { - // Create new RSS key - let new_key = uuid::Uuid::new_v4().to_string(); - sqlx::query("INSERT INTO RssKeys (UserID, RssKey) VALUES (?, ?)") - .bind(user_id) - .bind(&new_key) - .execute(pool) - .await?; - Ok(new_key) - } + Ok(row.and_then(|r| r.try_get("DeviceName").ok())) } } } - - // Helper function to get RSS episodes - async fn get_rss_episodes( - &self, - user_id: i32, - limit: i32, - source_type: Option<&str>, - podcast_ids: &[i32], - podcast_filter: bool, - domain: &str, - api_key: &str, - ) -> AppResult> { - use chrono::{DateTime, Utc}; - - match self { - DatabasePool::Postgres(pool) => { - let mut base_query = r#" - SELECT - e.episodeid, - e.podcastid, - e.episodetitle, - e.episodedescription, - CASE WHEN de.episodeid IS NULL - THEN e.episodeurl - ELSE CONCAT(CAST($1 AS TEXT), '/api/data/stream/', e.episodeid, '?api_key=', CAST($2 AS TEXT), '&user_id=', pp.userid) - END as episodeurl, - e.episodeartwork, - e.episodepubdate, - e.episodeduration, - pp.podcastname, - pp.author, - pp.artworkurl, - pp.description as podcastdescription - FROM "Episodes" e - JOIN "Podcasts" pp ON e.podcastid = pp.podcastid - LEFT JOIN "DownloadedEpisodes" de ON e.episodeid = de.episodeid - WHERE pp.userid = $3 - "#.to_string(); - - let mut param_count = 3; - if podcast_filter && !podcast_ids.is_empty() { - param_count += 1; - base_query.push_str(&format!(" AND pp.podcastid = ANY(${})", param_count)); - } - - // Add YouTube union if needed (exact Python logic) - let add_youtube_union = source_type.is_none() || source_type == Some("youtube"); - if add_youtube_union { - base_query.push_str(r#" - UNION ALL - SELECT - y.videoid as episodeid, - y.podcastid, - y.videotitle as episodetitle, - y.videodescription as 
episodedescription, - CONCAT(CAST($1 AS TEXT), '/api/data/stream/', CAST(y.videoid AS TEXT), '?api_key=', CAST($2 AS TEXT), '&type=youtube&user_id=', pv.userid) as episodeurl, - y.thumbnailurl as episodeartwork, - y.publishedat as episodepubdate, - y.duration as episodeduration, - pv.podcastname, - pv.author, - pv.artworkurl, - pv.description as podcastdescription - FROM "YouTubeVideos" y - JOIN "Podcasts" pv on y.podcastid = pv.podcastid - WHERE pv.userid = $3 - "#); - - if podcast_filter && !podcast_ids.is_empty() { - base_query.push_str(&format!(" AND pv.podcastid = ANY(${})", param_count)); - } - } - base_query.push_str(" ORDER BY episodepubdate DESC"); - if limit > 0 { - base_query.push_str(&format!(" LIMIT {}", limit)); - } - - // Execute query - let mut query = sqlx::query(&base_query) - .bind(domain) - .bind(api_key) - .bind(user_id); - - if podcast_filter && !podcast_ids.is_empty() { - query = query.bind(podcast_ids); - } - - let rows = query.fetch_all(pool).await?; - - let mut episodes = Vec::new(); - for row in rows { - let title: String = row.try_get("episodetitle").unwrap_or_else(|_| "Untitled Episode".to_string()); - let description: String = row.try_get("episodedescription").unwrap_or_else(|_| String::new()); - let url: String = row.try_get("episodeurl").unwrap_or_else(|_| String::new()); - let duration: Option = row.try_get("episodeduration").ok(); - let author: Option = row.try_get("author").ok(); - let artwork_url: Option = row.try_get("episodeartwork").ok(); - - let pub_date = if let Ok(dt) = row.try_get::, _>("episodepubdate") { - dt.format("%a, %d %b %Y %H:%M:%S %z").to_string() - } else { - Utc::now().format("%a, %d %b %Y %H:%M:%S %z").to_string() - }; + // Decrypt GPodder token using existing encryption system - matches Python token decryption + // Get comprehensive GPodder server statistics by calling actual GPodder API endpoints + pub async fn get_gpodder_server_statistics(&self, user_id: i32) -> AppResult { + use 
crate::handlers::sync::{GpodderStatistics, ServerDevice, ServerSubscription, ServerEpisodeAction, EndpointTest}; + use std::time::Instant; + + // Get user's sync settings using the same method as sync operations + let sync_settings = self.get_user_sync_settings(user_id).await?; + let settings = match sync_settings { + Some(s) => s, + None => { + return Ok(GpodderStatistics { + server_url: "No sync configured".to_string(), + sync_type: "None".to_string(), + sync_enabled: false, + server_devices: vec![], + total_devices: 0, + server_subscriptions: vec![], + total_subscriptions: 0, + recent_episode_actions: vec![], + total_episode_actions: 0, + connection_status: "Not configured".to_string(), + last_sync_timestamp: None, + api_endpoints_tested: vec![], + }); + } + }; - episodes.push(RssEpisode { - title, - description, - url, - pub_date, - duration, - author, - artwork_url, - }); - } - - Ok(episodes) + // Use the same authentication logic as sync operations + let (gpodder_url, username, password) = match settings.sync_type.as_str() { + "gpodder" => { + // Internal gPodder API - use token directly (no decryption needed) + ("http://localhost:8042".to_string(), settings.username.clone(), settings.token.clone()) } - DatabasePool::MySQL(pool) => { - let mut base_query = r#" - SELECT - e.EpisodeID, - e.PodcastID, - e.EpisodeTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, - e.EpisodeDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, - CASE WHEN de.EpisodeID IS NULL - THEN e.EpisodeURL COLLATE utf8mb4_unicode_ci - ELSE CONCAT(CAST(? AS CHAR), '/api/data/stream/', CAST(e.EpisodeID AS CHAR), '?api_key=', CAST(? 
AS CHAR), '&user_id=', pp.UserID) - END COLLATE utf8mb4_unicode_ci as EpisodeURL, - e.EpisodeArtwork COLLATE utf8mb4_unicode_ci as EpisodeArtwork, - e.EpisodePubDate, - e.EpisodeDuration, - pp.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, - pp.Author COLLATE utf8mb4_unicode_ci as Author, - pp.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, - pp.Description COLLATE utf8mb4_unicode_ci as PodcastDescription - FROM Episodes e - JOIN Podcasts pp ON e.PodcastID = pp.PodcastID - LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID - WHERE pp.UserID = ? - "#.to_string(); + "external" => { + // External gPodder server - decrypt token first + let decrypted_token = self.decrypt_password(&settings.token).await?; + (settings.url.clone(), settings.username.clone(), decrypted_token) + } + "nextcloud" => { + // Nextcloud sync - decrypt token first + let decrypted_token = self.decrypt_password(&settings.token).await?; + (settings.url.clone(), settings.username.clone(), decrypted_token) + } + _ => { + return Ok(GpodderStatistics { + server_url: settings.url.clone(), + sync_type: settings.sync_type.clone(), + sync_enabled: false, + server_devices: vec![], + total_devices: 0, + server_subscriptions: vec![], + total_subscriptions: 0, + recent_episode_actions: vec![], + total_episode_actions: 0, + connection_status: "Unsupported sync type".to_string(), + last_sync_timestamp: None, + api_endpoints_tested: vec![], + }); + } + }; + + let mut api_endpoints_tested = Vec::new(); + let mut server_devices = Vec::new(); + let mut server_subscriptions = Vec::new(); + let mut recent_episode_actions = Vec::new(); - if podcast_filter && !podcast_ids.is_empty() { - let placeholders = vec!["?"; podcast_ids.len()].join(","); - base_query.push_str(&format!(" AND pp.PodcastID IN ({})", placeholders)); + // Handle Nextcloud differently from standard GPodder API + if settings.sync_type == "nextcloud" { + // Nextcloud uses different endpoints and doesn't have devices concept + let 
client = reqwest::Client::new(); + + // Test 1: Get subscriptions from Nextcloud + let subscriptions_url = format!("{}/index.php/apps/gpoddersync/subscriptions", gpodder_url.trim_end_matches('/')); + let start = Instant::now(); + + let subscriptions_response = client + .get(&subscriptions_url) + .basic_auth(&username, Some(&password)) + .send() + .await; + + match subscriptions_response { + Ok(resp) if resp.status().is_success() => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/subscriptions".to_string(), + status: "success".to_string(), + response_time_ms: Some(duration), + error: None, + }); + + match resp.json::().await { + Ok(subs_data) => { + tracing::info!("Nextcloud subscriptions response: {:?}", subs_data); + if let Some(subs_array) = subs_data.as_array() { + tracing::info!("Found {} subscriptions in Nextcloud array", subs_array.len()); + for sub in subs_array { + if let Some(url) = sub.as_str() { + server_subscriptions.push(ServerSubscription { + url: url.to_string(), + title: None, + description: None, + }); + } + } + } else { + tracing::warn!("Nextcloud subscriptions response is not an array: {:?}", subs_data); + } + } + Err(e) => { + tracing::warn!("Failed to parse Nextcloud subscriptions response: {}", e); + } + } + } + Ok(resp) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/subscriptions".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(format!("HTTP {}", resp.status())), + }); + } + Err(e) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/subscriptions".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(e.to_string()), + }); } + } - // Add YouTube union if needed - let 
add_youtube_union = source_type.is_none() || source_type == Some("youtube"); - if add_youtube_union { - base_query.push_str(r#" - UNION ALL - SELECT - y.VideoID as EpisodeID, - y.PodcastID as PodcastID, - y.VideoTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle, - y.VideoDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription, - CONCAT(CAST(? AS CHAR), '/api/data/stream/', CAST(y.VideoID AS CHAR), '?api_key=', CAST(? AS CHAR), '&type=youtube&user_id=', pv.UserID) COLLATE utf8mb4_unicode_ci as EpisodeURL, - y.ThumbnailURL COLLATE utf8mb4_unicode_ci as EpisodeArtwork, - y.PublishedAt as EpisodePubDate, - y.Duration as EpisodeDuration, - pv.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName, - pv.Author COLLATE utf8mb4_unicode_ci as Author, - pv.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL, - pv.Description COLLATE utf8mb4_unicode_ci as PodcastDescription - FROM YouTubeVideos y - JOIN Podcasts pv on y.PodcastID = pv.PodcastID - WHERE pv.UserID = ? - "#); + // Test 2: Get episode actions from Nextcloud + let episode_actions_url = format!("{}/index.php/apps/gpoddersync/episode_action", gpodder_url.trim_end_matches('/')); + let start = Instant::now(); - if podcast_filter && !podcast_ids.is_empty() { - let placeholders = vec!["?"; podcast_ids.len()].join(","); - base_query.push_str(&format!(" AND pv.PodcastID IN ({})", placeholders)); + let episode_response = client + .get(&episode_actions_url) + .basic_auth(&username, Some(&password)) + .send() + .await; + + match episode_response { + Ok(resp) if resp.status().is_success() => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/episode_action".to_string(), + status: "success".to_string(), + response_time_ms: Some(duration), + error: None, + }); + + match resp.json::().await { + Ok(episode_data) => { + if let Some(actions) = episode_data.get("actions").and_then(|v| v.as_array()) { + for action in actions.iter().take(10) 
{ // Show last 10 actions + recent_episode_actions.push(ServerEpisodeAction { + podcast: action["podcast"].as_str().unwrap_or("").to_string(), + episode: action["episode"].as_str().unwrap_or("").to_string(), + action: action["action"].as_str().unwrap_or("").to_string(), + timestamp: action["timestamp"].as_str().unwrap_or("").to_string(), + position: action["position"].as_i64().map(|p| p as i32), + device: Some("nextcloud".to_string()), + }); + } + } + } + Err(e) => { + tracing::warn!("Failed to parse Nextcloud episode actions response: {}", e); + } } } - - base_query.push_str(" ORDER BY EpisodePubDate DESC"); - if limit > 0 { - base_query.push_str(&format!(" LIMIT {}", limit)); + Ok(resp) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/episode_action".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(format!("HTTP {}", resp.status())), + }); } + Err(e) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /index.php/apps/gpoddersync/episode_action".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(e.to_string()), + }); + } + } - // Build query with parameters - let mut query = sqlx::query(&base_query) - .bind(domain) - .bind(api_key) - .bind(user_id); + // Nextcloud doesn't have devices concept, so add a fake device entry + server_devices.push(ServerDevice { + id: "nextcloud".to_string(), + caption: "Nextcloud gPodder Sync".to_string(), + device_type: "cloud".to_string(), + subscriptions: server_subscriptions.len() as i32, + }); + } else { + // Standard GPodder API (internal or external) + // Create GPodder session directly with already-decrypted password to avoid double decryption + let session = self.create_gpodder_session_with_password(&gpodder_url, &username, &password).await?; - if podcast_filter && 
!podcast_ids.is_empty() { - for &id in podcast_ids { - query = query.bind(id); + // Test 1: Get devices from GPodder API + let devices_url = format!("{}/api/2/devices/{}.json", gpodder_url.trim_end_matches('/'), username); + let start = Instant::now(); + + let devices_response = if session.authenticated { + session.client.get(&devices_url).send().await + } else { + session.client.get(&devices_url).basic_auth(&username, Some(&password)).send().await + }; + + match devices_response + { + Ok(resp) if resp.status().is_success() => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/devices/{username}.json".to_string(), + status: "success".to_string(), + response_time_ms: Some(duration), + error: None, + }); + + match resp.json::().await { + Ok(devices_data) => { + if let Some(devices_array) = devices_data.as_array() { + for device in devices_array { + server_devices.push(ServerDevice { + id: device["id"].as_str().unwrap_or("unknown").to_string(), + caption: device["caption"].as_str().unwrap_or("").to_string(), + device_type: device["type"].as_str().unwrap_or("unknown").to_string(), + subscriptions: device["subscriptions"].as_i64().unwrap_or(0) as i32, + }); + } + } + } + Err(e) => { + tracing::warn!("Failed to parse devices response: {}", e); } } + } + Ok(resp) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/devices/{username}.json".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(format!("HTTP {}", resp.status())), + }); + } + Err(e) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/devices/{username}.json".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(e.to_string()), + }); + } + } - if add_youtube_union { - query = 
query.bind(domain).bind(api_key).bind(user_id); - if podcast_filter && !podcast_ids.is_empty() { - for &id in podcast_ids { - query = query.bind(id); + // Test 2: Get subscriptions from GPodder API - use the user's actual default device + let device_name = self.get_or_create_default_device(user_id).await?; + let subscriptions_url = format!("{}/api/2/subscriptions/{}/{}.json?since=0", + gpodder_url.trim_end_matches('/'), username, device_name); + let start = Instant::now(); + + let subscriptions_response = if session.authenticated { + session.client.get(&subscriptions_url).send().await + } else { + session.client.get(&subscriptions_url).basic_auth(&username, Some(&password)).send().await + }; + + match subscriptions_response + { + Ok(resp) if resp.status().is_success() => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/subscriptions/{username}/{device}.json?since=0".to_string(), + status: "success".to_string(), + response_time_ms: Some(duration), + error: None, + }); + + match resp.json::().await { + Ok(subs_data) => { + // GPodder API returns subscriptions in format: {"add": ["url1", "url2"], "remove": ["url3"]} + if let Some(add_array) = subs_data["add"].as_array() { + for sub in add_array { + if let Some(url) = sub.as_str() { + server_subscriptions.push(ServerSubscription { + url: url.to_string(), + title: None, + description: None, + }); + } + } } } + Err(e) => { + tracing::warn!("Failed to parse subscriptions response: {}", e); + } } + } + Ok(resp) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/subscriptions/{username}/{device}.json?since=0".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(format!("HTTP {}", resp.status())), + }); + } + Err(e) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET 
/api/2/subscriptions/{username}/{device}.json?since=0".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(e.to_string()), + }); + } + } - let rows = query.fetch_all(pool).await?; - - let mut episodes = Vec::new(); - for row in rows { - let title: String = row.try_get("EpisodeTitle").unwrap_or_else(|_| "Untitled Episode".to_string()); - let description: String = row.try_get("EpisodeDescription").unwrap_or_else(|_| String::new()); - let url: String = row.try_get("EpisodeURL").unwrap_or_else(|_| String::new()); - let duration: Option = row.try_get("EpisodeDuration").ok(); - let author: Option = row.try_get("Author").ok(); - let artwork_url: Option = row.try_get("EpisodeArtwork").ok(); - - let pub_date = if let Ok(dt) = row.try_get::, _>("EpisodePubDate") { - dt.format("%a, %d %b %Y %H:%M:%S %z").to_string() - } else { - Utc::now().format("%a, %d %b %Y %H:%M:%S %z").to_string() - }; + // Test 3: Get episode actions from GPodder API + let episodes_url = format!("{}/api/2/episodes/{}.json?since=0&device={}", + gpodder_url.trim_end_matches('/'), username, device_name); + let start = Instant::now(); - episodes.push(RssEpisode { - title, - description, - url, - pub_date, - duration, - author, - artwork_url, - }); + let episodes_response = if session.authenticated { + session.client.get(&episodes_url).send().await + } else { + session.client.get(&episodes_url).basic_auth(&username, Some(&password)).send().await + }; + + match episodes_response + { + Ok(resp) if resp.status().is_success() => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/episodes/{username}.json?since=0&device={device}".to_string(), + status: "success".to_string(), + response_time_ms: Some(duration), + error: None, + }); + + match resp.json::().await { + Ok(episodes_data) => { + if let Some(actions) = episodes_data["actions"].as_array() { + for action in actions.iter().take(10) { // Show last 
10 actions + recent_episode_actions.push(ServerEpisodeAction { + podcast: action["podcast"].as_str().unwrap_or("").to_string(), + episode: action["episode"].as_str().unwrap_or("").to_string(), + action: action["action"].as_str().unwrap_or("").to_string(), + timestamp: action["timestamp"].as_str().unwrap_or("").to_string(), + position: action["position"].as_i64().map(|p| p as i32), + device: action["device"].as_str().map(|s| s.to_string()), + }); + } + } + } + Err(e) => { + tracing::warn!("Failed to parse episode actions response: {}", e); + } } - - Ok(episodes) + } + Ok(resp) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/episodes/{username}.json?since=0&device={device}".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(format!("HTTP {}", resp.status())), + }); + } + Err(e) => { + let duration = start.elapsed().as_millis() as i64; + api_endpoints_tested.push(EndpointTest { + endpoint: "GET /api/2/episodes/{username}.json?since=0&device={device}".to_string(), + status: "failed".to_string(), + response_time_ms: Some(duration), + error: Some(e.to_string()), + }); } } + } + + // Get sync status + let status = self.gpodder_get_status(user_id).await?; + let last_sync = self.get_last_sync_timestamp(user_id).await?; + + // Determine overall connection status + let connection_status = if api_endpoints_tested.iter().any(|t| t.status == "success") { + if api_endpoints_tested.iter().all(|t| t.status == "success") { + "All endpoints working" + } else { + "Partial connectivity" + } + } else { + "Connection failed" + }; + + Ok(GpodderStatistics { + server_url: gpodder_url, + sync_type: status.sync_type.clone(), + sync_enabled: status.sync_type != "None", + server_devices: server_devices.clone(), + total_devices: server_devices.len() as i32, + server_subscriptions: server_subscriptions.clone(), + total_subscriptions: server_subscriptions.len() as i32, + 
recent_episode_actions: recent_episode_actions.clone(), + total_episode_actions: recent_episode_actions.len() as i32, + connection_status: connection_status.to_string(), + last_sync_timestamp: last_sync.map(|dt| dt.format("%Y-%m-%d %H:%M:%S UTC").to_string()), + api_endpoints_tested, + }) } - // Get podcast notification status - matches Python get_podcast_notification_status function - pub async fn get_podcast_notification_status(&self, podcast_id: i32, user_id: i32) -> AppResult { - match self { + async fn decrypt_gpodder_token(&self, encrypted_token: &str) -> AppResult { + // Get encryption key from app settings + let encryption_key = match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT notificationsenabled - FROM "Podcasts" - WHERE podcastid = $1 AND userid = $2 - "#) - .bind(podcast_id) - .bind(user_id) + let row = sqlx::query(r#"SELECT encryptionkey FROM "AppSettings" WHERE appsettingsid = 1"#) .fetch_optional(pool) .await?; - if let Some(row) = row { - Ok(row.try_get("notificationsenabled").unwrap_or(false)) - } else { - Ok(false) - } + row.and_then(|r| r.try_get("encryptionkey").ok()) } DatabasePool::MySQL(pool) => { - let row = sqlx::query(" - SELECT NotificationsEnabled - FROM Podcasts - WHERE PodcastID = ? AND UserID = ? 
- ") - .bind(podcast_id) - .bind(user_id) + let row = sqlx::query("SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1") .fetch_optional(pool) .await?; - if let Some(row) = row { - let enabled: i8 = row.try_get("NotificationsEnabled").unwrap_or(0); - Ok(enabled != 0) - } else { - Ok(false) - } + row.and_then(|r| r.try_get("EncryptionKey").ok()) } - } + }; + + let encryption_key: String = encryption_key + .ok_or_else(|| AppError::internal("Encryption key not found"))?; + + // Decrypt using Fernet (matches Python implementation) + use fernet::Fernet; + let fernet = match Fernet::new(&encryption_key) { + Some(f) => f, + None => return Err(AppError::internal("Failed to create Fernet cipher with provided key")), + }; + + let decrypted = fernet.decrypt(encrypted_token) + .map_err(|e| AppError::internal(&format!("Failed to decrypt token: {}", e)))?; + + String::from_utf8(decrypted) + .map_err(|e| AppError::internal(&format!("Failed to parse decrypted token: {}", e))) } - // Get MFA secret - matches Python get_mfa_secret function - pub async fn get_mfa_secret(&self, user_id: i32) -> AppResult> { - match self { + // Nextcloud subscription refresh for background tasks - matches Python nextcloud refresh + async fn refresh_nextcloud_subscription_background(&self, user_id: i32) -> AppResult { + // Get user nextcloud settings + let settings = match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT mfa_secret FROM "Users" WHERE userid = $1"#) + let row = sqlx::query(r#"SELECT gpodderurl, gpoddertoken, gpodderloginname FROM "Users" WHERE userid = $1"#) .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - Ok(row.try_get("mfa_secret").ok()) + if let Some(r) = row { + ( + r.try_get::, _>("gpodderurl")?.unwrap_or_default(), + r.try_get::, _>("gpoddertoken")?.unwrap_or_default(), + r.try_get::, _>("gpodderloginname")?.unwrap_or_default(), + ) } else { - Ok(None) + return Ok(false); } } DatabasePool::MySQL(pool) => { - let row = 
sqlx::query("SELECT MFA_Secret FROM Users WHERE UserID = ?") + let row = sqlx::query("SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = ?") .bind(user_id) .fetch_optional(pool) .await?; - if let Some(row) = row { - Ok(row.try_get("MFA_Secret").ok()) + if let Some(r) = row { + ( + r.try_get::, _>("GpodderUrl")?.unwrap_or_default(), + r.try_get::, _>("GpodderToken")?.unwrap_or_default(), + r.try_get::, _>("GpodderLoginName")?.unwrap_or_default(), + ) } else { - Ok(None) - } - } - } - } - - // Return YouTube episodes - matches Python return_youtube_episodes function exactly - pub async fn return_youtube_episodes( - &self, - user_id: i32, - podcast_id: i32, - ) -> AppResult>> { - match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT "Podcasts".podcastid, "Podcasts".podcastname, "YouTubeVideos".videoid AS episodeid, - "YouTubeVideos".videotitle AS episodetitle, "YouTubeVideos".publishedat AS episodepubdate, - "YouTubeVideos".videodescription AS episodedescription, - "YouTubeVideos".thumbnailurl AS episodeartwork, "YouTubeVideos".videourl AS episodeurl, - "YouTubeVideos".duration AS episodeduration, - "YouTubeVideos".listenposition AS listenduration, - "YouTubeVideos".youtubevideoid AS guid - FROM "YouTubeVideos" - INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid - WHERE "Podcasts".podcastid = $1 AND "Podcasts".userid = $2 - ORDER BY "YouTubeVideos".publishedat DESC - "#) - .bind(podcast_id) - .bind(user_id) - .fetch_all(pool) - .await?; - - if rows.is_empty() { - return Ok(None); - } - - let mut episodes = Vec::new(); - for row in rows { - let episode = serde_json::json!({ - "Podcastid": row.try_get::("podcastid").unwrap_or(0), - "Podcastname": row.try_get::("podcastname").unwrap_or_default(), - "Episodeid": row.try_get::("episodeid").unwrap_or(0), - "Episodetitle": row.try_get::("episodetitle").unwrap_or_default(), - "Episodepubdate": row.try_get::("episodepubdate") - .map(|dt| 
dt.and_utc().to_rfc3339()) - .unwrap_or_default(), - "Episodedescription": row.try_get::("episodedescription").unwrap_or_default(), - "Episodeartwork": row.try_get::("episodeartwork").unwrap_or_default(), - "Episodeurl": row.try_get::("episodeurl").unwrap_or_default(), - "Episodeduration": row.try_get::("episodeduration").unwrap_or(0), - "Listenduration": row.try_get::("listenduration").unwrap_or(0), - "Guid": row.try_get::("guid").unwrap_or_default() - }); - episodes.push(episode); - } - - Ok(Some(episodes)) - } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query(r#" - SELECT Podcasts.PodcastID, Podcasts.PodcastName, YouTubeVideos.VideoID AS EpisodeID, - YouTubeVideos.VideoTitle AS EpisodeTitle, YouTubeVideos.PublishedAt AS EpisodePubDate, - YouTubeVideos.VideoDescription AS EpisodeDescription, - YouTubeVideos.ThumbnailURL AS EpisodeArtwork, YouTubeVideos.VideoURL AS EpisodeURL, - YouTubeVideos.Duration AS EpisodeDuration, - YouTubeVideos.ListenPosition AS ListenDuration, - YouTubeVideos.YouTubeVideoID AS guid - FROM YouTubeVideos - INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID - WHERE Podcasts.PodcastID = ? AND Podcasts.UserID = ? 
- ORDER BY YouTubeVideos.PublishedAt DESC - "#) - .bind(podcast_id) - .bind(user_id) - .fetch_all(pool) - .await?; - - if rows.is_empty() { - return Ok(None); - } - - let mut episodes = Vec::new(); - for row in rows { - let episode = serde_json::json!({ - "Podcastid": row.try_get::("PodcastID").unwrap_or(0), - "Podcastname": row.try_get::("PodcastName").unwrap_or_default(), - "Episodeid": row.try_get::("EpisodeID").unwrap_or(0), - "Episodetitle": row.try_get::("EpisodeTitle").unwrap_or_default(), - "Episodepubdate": row.try_get::("EpisodePubDate") - .map(|dt| dt.and_utc().to_rfc3339()) - .unwrap_or_default(), - "Episodedescription": row.try_get::("EpisodeDescription").unwrap_or_default(), - "Episodeartwork": row.try_get::("EpisodeArtwork").unwrap_or_default(), - "Episodeurl": row.try_get::("EpisodeURL").unwrap_or_default(), - "Episodeduration": row.try_get::("EpisodeDuration").unwrap_or(0), - "Listenduration": row.try_get::("ListenDuration").unwrap_or(0), - "Guid": row.try_get::("guid").unwrap_or_default() - }); - episodes.push(episode); + return Ok(false); } + } + }; + + let (gpodder_url, _gpodder_token, gpodder_login) = settings; + + if gpodder_url.is_empty() || gpodder_login.is_empty() { + return Ok(false); + } + + // Call existing nextcloud sync functionality + self.sync_with_nextcloud_for_user(user_id).await + } - Ok(Some(episodes)) - } - } - } - - // Remove YouTube channel by URL - matches Python remove_youtube_channel_by_url function exactly - pub async fn remove_youtube_channel_by_url( - &self, - channel_name: &str, - channel_url: &str, - user_id: i32, - ) -> AppResult<()> { - println!("got to remove youtube channel"); - - // Get the PodcastID first - let podcast_id = match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#" - SELECT podcastid - FROM "Podcasts" - WHERE podcastname = $1 - AND feedurl = $2 - AND userid = $3 - AND isyoutubechannel = TRUE - "#) - .bind(channel_name) - .bind(channel_url) - .bind(user_id) - 
.fetch_optional(pool) - .await?; - + // Get last sync timestamp for incremental sync - PROPER GPodder spec implementation + async fn get_last_sync_timestamp(&self, user_id: i32) -> AppResult>> { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#"SELECT lastsynctime FROM "Users" WHERE userid = $1"#) + .bind(user_id) + .fetch_optional(pool) + .await?; + if let Some(row) = row { - row.try_get::("podcastid")? + Ok(row.try_get("lastsynctime").unwrap_or(None)) } else { - return Err(AppError::external_error(&format!("No YouTube channel found with name {}", channel_name))); + Ok(None) } } - DatabasePool::MySQL(pool) => { - let row = sqlx::query(r#" - SELECT PodcastID - FROM Podcasts - WHERE PodcastName = ? - AND FeedURL = ? - AND UserID = ? - AND IsYouTubeChannel = TRUE - "#) - .bind(channel_name) - .bind(channel_url) - .bind(user_id) - .fetch_optional(pool) - .await?; - + DatabasePool::MySQL(pool) => { + let row = sqlx::query("SELECT LastSyncTime FROM Users WHERE UserID = ?") + .bind(user_id) + .fetch_optional(pool) + .await?; + if let Some(row) = row { - row.try_get::("PodcastID")? 
+ Ok(row.try_get("LastSyncTime").unwrap_or(None)) } else { - return Err(AppError::external_error(&format!("No YouTube channel found with name {}", channel_name))); + Ok(None) } } - }; - - // Remove the channel by ID - self.remove_youtube_channel_by_id(podcast_id, user_id).await + } } - - // Remove YouTube channel by ID - matches Python remove_youtube_channel function exactly - pub async fn remove_youtube_channel_by_id( - &self, - podcast_id: i32, - user_id: i32, - ) -> AppResult<()> { - // First, get all video IDs for the podcast so we can delete the files - let video_ids: Vec = match self { - DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#"SELECT youtubevideoid FROM "YouTubeVideos" WHERE podcastid = $1"#) - .bind(podcast_id) - .fetch_all(pool) + + // Update last sync timestamp - PROPER GPodder spec implementation for incremental sync + async fn update_last_sync_timestamp(&self, user_id: i32) -> AppResult<()> { + let now = chrono::Utc::now(); + + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET lastsynctime = $1 WHERE userid = $2"#) + .bind(now) + .bind(user_id) + .execute(pool) .await?; - - rows.into_iter() - .map(|row| row.try_get::("youtubevideoid").unwrap_or_default()) - .collect() } - DatabasePool::MySQL(pool) => { - let rows = sqlx::query("SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = ?") - .bind(podcast_id) - .fetch_all(pool) + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET LastSyncTime = ? 
WHERE UserID = ?") + .bind(now) + .bind(user_id) + .execute(pool) .await?; - - rows.into_iter() - .map(|row| row.try_get::("YouTubeVideoID").unwrap_or_default()) - .collect() - } - }; - - // Delete the MP3 files for each video - for video_id in &video_ids { - let file_paths = vec![ - format!("/opt/pinepods/downloads/youtube/{}.mp3", video_id), - format!("/opt/pinepods/downloads/youtube/{}.mp3.mp3", video_id), // In case of double extension - ]; - - for file_path in file_paths { - if tokio::fs::metadata(&file_path).await.is_ok() { - match tokio::fs::remove_file(&file_path).await { - Ok(_) => println!("Deleted file: {}", file_path), - Err(e) => println!("Failed to delete file {}: {}", file_path, e), - } - } } } + + Ok(()) + } - // Delete from the related tables in the correct order + // Clear last sync timestamp - for initial full sync to start fresh + async fn clear_last_sync_timestamp(&self, user_id: i32) -> AppResult<()> { match self { - DatabasePool::Postgres(pool) => { - let delete_queries = vec![ - r#"DELETE FROM "PlaylistContents" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "UserEpisodeHistory" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "UserVideoHistory" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "DownloadedEpisodes" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "DownloadedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "SavedVideos" WHERE videoid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "SavedEpisodes" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "EpisodeQueue" WHERE episodeid IN (SELECT videoid FROM "YouTubeVideos" WHERE podcastid = $1)"#, - r#"DELETE FROM "YouTubeVideos" WHERE podcastid = $1"#, - 
r#"DELETE FROM "Podcasts" WHERE podcastid = $1 AND isyoutubechannel = TRUE"#, - ]; - - for query in delete_queries { - sqlx::query(query) - .bind(podcast_id) - .execute(pool) - .await?; - } - - // Update user stats - sqlx::query(r#"UPDATE "UserStats" SET podcastsadded = podcastsadded - 1 WHERE userid = $1"#) + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET lastsynctime = NULL WHERE userid = $1"#) .bind(user_id) .execute(pool) .await?; } - DatabasePool::MySQL(pool) => { - let delete_queries = vec![ - "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = ?)", - "DELETE FROM YouTubeVideos WHERE PodcastID = ?", - "DELETE FROM Podcasts WHERE PodcastID = ? 
AND IsYouTubeChannel = TRUE", - ]; - - for query in delete_queries { - sqlx::query(query) - .bind(podcast_id) - .execute(pool) - .await?; - } - - // Update user stats - sqlx::query("UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = ?") + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET LastSyncTime = NULL WHERE UserID = ?") .bind(user_id) .execute(pool) .await?; } } - + Ok(()) } - // Get podcast ID by feed URL and title - for get_podcast_details_dynamic - pub async fn get_podcast_id_by_feed(&self, user_id: i32, feed_url: &str, podcast_title: &str) -> AppResult { + // Get user episode actions since timestamp - CRITICAL for incremental sync performance + async fn get_user_episode_actions_since(&self, user_id: i32, since: chrono::DateTime) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query( - r#"SELECT podcastid FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"# - ) - .bind(feed_url) + let rows = sqlx::query(r#" + SELECT + p.feedurl as podcast, + e.episodeurl as episode, + eh.listenduration as position, + e.episodeduration as total, + CASE + WHEN eh.listenduration > 0 THEN 'play' + WHEN d.episodeid IS NOT NULL THEN 'download' + ELSE 'new' + END as action, + COALESCE(eh.listendate, '1970-01-01'::timestamp) as timestamp + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid AND p.userid = $1 + LEFT JOIN "UserEpisodeHistory" eh ON e.episodeid = eh.episodeid AND eh.userid = $1 + LEFT JOIN "DownloadedEpisodes" d ON e.episodeid = d.episodeid AND d.userid = $1 + WHERE (eh.userid = $1 OR d.userid = $1) + AND COALESCE(eh.listendate, '1970-01-01'::timestamp) > $2 + ORDER BY timestamp DESC + "#) .bind(user_id) - .fetch_optional(pool) + .bind(since) + .fetch_all(pool) .await?; - if let Some(row) = row { - Ok(row.try_get("podcastid")?) 
- } else { - Err(AppError::not_found("Podcast not found")) + let mut actions = Vec::new(); + for row in rows { + // Handle PostgreSQL TIMESTAMP (not TIMESTAMPTZ) column + let naive_timestamp: chrono::NaiveDateTime = row.try_get("timestamp")?; + let utc_timestamp = chrono::DateTime::::from_naive_utc_and_offset(naive_timestamp, chrono::Utc); + let timestamp_str = utc_timestamp.to_rfc3339(); + + let action: String = row.try_get("action")?; + let position: Option = row.try_get("position").ok().flatten(); + let total: Option = row.try_get("total").ok().flatten(); + + // For play actions, include started, position, and total (required by AntennaPod) + let mut action_json = serde_json::json!({ + "podcast": row.try_get::("podcast")?, + "episode": row.try_get::("episode")?, + "action": action, + "timestamp": timestamp_str, + }); + + if action == "play" && position.is_some() && total.is_some() { + action_json["started"] = serde_json::json!(0); // Always start from 0 + action_json["position"] = serde_json::json!(position.unwrap()); + action_json["total"] = serde_json::json!(total.unwrap()); + } + + actions.push(action_json); } + Ok(actions) } DatabasePool::MySQL(pool) => { - let row = sqlx::query( - "SELECT PodcastID FROM Podcasts WHERE FeedURL = ? AND UserID = ?" - ) - .bind(feed_url) + let rows = sqlx::query(" + SELECT + p.FeedURL as podcast, + e.EpisodeURL as episode, + eh.ListenDuration as position, + e.EpisodeDuration as total, + CASE + WHEN eh.ListenDuration > 0 THEN 'play' + WHEN d.EpisodeID IS NOT NULL THEN 'download' + ELSE 'new' + END as action, + COALESCE(eh.ListenDate, '1970-01-01 00:00:00') as timestamp + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID AND p.UserID = ? + LEFT JOIN UserEpisodeHistory eh ON e.EpisodeID = eh.EpisodeID AND eh.UserID = ? + LEFT JOIN DownloadedEpisodes d ON e.EpisodeID = d.EpisodeID AND d.UserID = ? + WHERE (eh.UserID = ? OR d.UserID = ?) + AND COALESCE(eh.ListenDate, '1970-01-01 00:00:00') > ? 
+ ORDER BY timestamp DESC + ") .bind(user_id) - .fetch_optional(pool) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(user_id) + .bind(since) + .fetch_all(pool) .await?; - if let Some(row) = row { - Ok(row.try_get("PodcastID")?) - } else { - Err(AppError::not_found("Podcast not found")) + let mut actions = Vec::new(); + for row in rows { + // Handle MySQL DATETIME column + let naive_timestamp: chrono::NaiveDateTime = row.try_get("timestamp")?; + let utc_timestamp = chrono::DateTime::::from_naive_utc_and_offset(naive_timestamp, chrono::Utc); + let timestamp_str = utc_timestamp.to_rfc3339(); + + let action: String = row.try_get("action")?; + let position: Option = row.try_get("position").ok().flatten(); + let total: Option = row.try_get("total").ok().flatten(); + + // For play actions, include started, position, and total (required by AntennaPod) + let mut action_json = serde_json::json!({ + "podcast": row.try_get::("podcast")?, + "episode": row.try_get::("episode")?, + "action": action, + "timestamp": timestamp_str, + }); + + if action == "play" && position.is_some() && total.is_some() { + action_json["started"] = serde_json::json!(0); // Always start from 0 + action_json["position"] = serde_json::json!(position.unwrap()); + action_json["total"] = serde_json::json!(total.unwrap()); + } + + actions.push(action_json); } + Ok(actions) } } } - // Get raw podcast details - returns all fields as JSON for get_podcast_details_dynamic - pub async fn get_podcast_details_raw(&self, user_id: i32, podcast_id: i32) -> AppResult> { + // Get all user episode actions - fallback for first sync + async fn get_user_episode_actions(&self, user_id: i32) -> AppResult> { + // Use since timestamp of epoch (1970) to get all actions + let epoch = chrono::DateTime::from_timestamp(0, 0).unwrap_or_else(chrono::Utc::now); + self.get_user_episode_actions_since(user_id, epoch).await + } +} + +#[derive(Debug)] +struct RssEpisode { + title: String, + description: String, + url: String, + 
pub_date: String, + duration: Option, + author: Option, + artwork_url: Option, +} + +impl DatabasePool { + // Get all users with nextcloud sync enabled - matches Python get_all_users_with_nextcloud_sync + pub async fn get_all_users_with_nextcloud_sync(&self) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query( - r#"SELECT * FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"# + let user_ids: Vec = sqlx::query_scalar( + r#"SELECT userid FROM "Users" WHERE pod_sync_type = 'nextcloud'"# ) - .bind(podcast_id) - .bind(user_id) - .fetch_optional(pool) + .fetch_all(pool) .await?; - - if let Some(row) = row { - let mut details = serde_json::Map::new(); - - details.insert("podcastname".to_string(), serde_json::Value::String(row.try_get::("podcastname").unwrap_or_default())); - details.insert("feedurl".to_string(), serde_json::Value::String(row.try_get::("feedurl").unwrap_or_default())); - details.insert("description".to_string(), serde_json::Value::String(row.try_get::("description").unwrap_or_default())); - details.insert("author".to_string(), serde_json::Value::String(row.try_get::("author").unwrap_or_default())); - details.insert("artworkurl".to_string(), serde_json::Value::String(row.try_get::("artworkurl").unwrap_or_default())); - details.insert("explicit".to_string(), serde_json::Value::Bool(row.try_get::("explicit").unwrap_or(false))); - details.insert("episodecount".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("episodecount").unwrap_or(0)))); - let categories_str = row.try_get::("categories").unwrap_or_default(); - let categories_parsed = self.parse_categories_json(&categories_str).unwrap_or_default(); - details.insert("categories".to_string(), serde_json::to_value(categories_parsed).unwrap_or(serde_json::Value::Object(serde_json::Map::new()))); - details.insert("websiteurl".to_string(), serde_json::Value::String(row.try_get::("websiteurl").unwrap_or_default())); - 
details.insert("podcastindexid".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("podcastindexid").unwrap_or(0)))); - details.insert("isyoutubechannel".to_string(), serde_json::Value::Bool(row.try_get::("isyoutubechannel").unwrap_or(false))); - - Ok(Some(serde_json::Value::Object(details))) - } else { - Ok(None) - } + Ok(user_ids) } DatabasePool::MySQL(pool) => { - let row = sqlx::query( - "SELECT * FROM Podcasts WHERE PodcastID = ? AND UserID = ?" + let user_ids: Vec = sqlx::query_scalar( + "SELECT UserID FROM Users WHERE Pod_Sync_Type = 'nextcloud'" ) - .bind(podcast_id) - .bind(user_id) - .fetch_optional(pool) + .fetch_all(pool) .await?; - - if let Some(row) = row { - let mut details = serde_json::Map::new(); - - details.insert("podcastname".to_string(), serde_json::Value::String(row.try_get::("PodcastName").unwrap_or_default())); - details.insert("feedurl".to_string(), serde_json::Value::String(row.try_get::("FeedURL").unwrap_or_default())); - details.insert("description".to_string(), serde_json::Value::String(row.try_get::("Description").unwrap_or_default())); - details.insert("author".to_string(), serde_json::Value::String(row.try_get::("Author").unwrap_or_default())); - details.insert("artworkurl".to_string(), serde_json::Value::String(row.try_get::("ArtworkURL").unwrap_or_default())); - details.insert("explicit".to_string(), serde_json::Value::Bool(row.try_get::("Explicit").unwrap_or(false))); - details.insert("episodecount".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("EpisodeCount").unwrap_or(0)))); - let categories_str = row.try_get::("Categories").unwrap_or_default(); - let categories_parsed = self.parse_categories_json(&categories_str).unwrap_or_default(); - details.insert("categories".to_string(), serde_json::to_value(categories_parsed).unwrap_or(serde_json::Value::Object(serde_json::Map::new()))); - details.insert("websiteurl".to_string(), 
serde_json::Value::String(row.try_get::("WebsiteURL").unwrap_or_default())); - details.insert("podcastindexid".to_string(), serde_json::Value::Number(serde_json::Number::from(row.try_get::("PodcastIndexID").unwrap_or(0)))); - details.insert("isyoutubechannel".to_string(), serde_json::Value::Bool(row.try_get::("IsYouTubeChannel").unwrap_or(false))); - - Ok(Some(serde_json::Value::Object(details))) - } else { - Ok(None) - } + Ok(user_ids) } } } - // Get podcast values from feed - for get_podcast_details_dynamic when podcast is not added - pub async fn get_podcast_values_from_feed(&self, feed_url: &str, user_id: i32, display_only: bool) -> AppResult { - // Use the real get_podcast_values function that exists in the codebase - let podcast_values = self.get_podcast_values(feed_url, user_id, None, None).await?; + // Complete implementation of sync_with_nextcloud_for_user - matches Python nextcloud sync functionality + pub async fn sync_with_nextcloud_for_user(&self, user_id: i32) -> AppResult { + tracing::info!("Starting Nextcloud sync for user {}", user_id); - // Convert HashMap to the expected JSON format for get_podcast_details_dynamic - let response = serde_json::json!({ - "pod_title": podcast_values.get("podcastname").unwrap_or(&"Unknown Podcast".to_string()), - "pod_feed_url": feed_url, - "pod_description": podcast_values.get("description").unwrap_or(&"".to_string()), - "pod_author": podcast_values.get("author").unwrap_or(&"Unknown Author".to_string()), - "pod_artwork": podcast_values.get("artworkurl").unwrap_or(&"/static/assets/default-podcast.png".to_string()), - "pod_explicit": podcast_values.get("explicit").unwrap_or(&"False".to_string()) == "True", - "pod_episode_count": podcast_values.get("episodecount").unwrap_or(&"0".to_string()).parse::().unwrap_or(0), - "categories": podcast_values.get("categories").unwrap_or(&"{}".to_string()), - "pod_website": podcast_values.get("websiteurl").unwrap_or(&"".to_string()), - }); - - Ok(response) - } - - // Update feed 
cutoff days - for update_feed_cutoff_days endpoint - pub async fn update_feed_cutoff_days(&self, podcast_id: i32, user_id: i32, feed_cutoff_days: i32) -> AppResult { - match self { + // Get user's Nextcloud configuration + let gpodder_status = self.gpodder_get_status(user_id).await?; + + // Only proceed if sync type is nextcloud + if gpodder_status.sync_type != "nextcloud" { + tracing::info!("User {} does not have Nextcloud sync enabled", user_id); + return Ok(false); + } + + // Get Nextcloud credentials from database + let (gpodder_url, username, encrypted_token) = match self { DatabasePool::Postgres(pool) => { - // First verify podcast exists and belongs to user - let existing = sqlx::query(r#"SELECT podcastid FROM "Podcasts" WHERE podcastid = $1 AND userid = $2"#) - .bind(podcast_id) + let row = sqlx::query(r#"SELECT gpodderurl, gpodderloginname, gpoddertoken FROM "Users" WHERE userid = $1"#) .bind(user_id) .fetch_optional(pool) .await?; - - if existing.is_none() { - return Ok(false); + + if let Some(row) = row { + let url: Option = row.try_get("gpodderurl")?; + let login: Option = row.try_get("gpodderloginname")?; + let token: Option = row.try_get("gpoddertoken")?; + + ( + url.ok_or_else(|| AppError::internal("Nextcloud URL not configured"))?, + login.ok_or_else(|| AppError::internal("Nextcloud username not configured"))?, + token.ok_or_else(|| AppError::internal("Nextcloud token not configured"))? + ) + } else { + return Err(AppError::not_found("User not found")); } - - // Update the feed cutoff days - let result = sqlx::query(r#"UPDATE "Podcasts" SET feedcutoffdays = $1 WHERE podcastid = $2 AND userid = $3"#) - .bind(feed_cutoff_days) - .bind(podcast_id) - .bind(user_id) - .execute(pool) - .await?; - - Ok(result.rows_affected() > 0) } DatabasePool::MySQL(pool) => { - // First verify podcast exists and belongs to user - let existing = sqlx::query("SELECT PodcastID FROM Podcasts WHERE PodcastID = ? 
AND UserID = ?") - .bind(podcast_id) + let row = sqlx::query("SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM Users WHERE UserID = ?") .bind(user_id) .fetch_optional(pool) .await?; - - if existing.is_none() { - return Ok(false); - } - - // Update the feed cutoff days - let result = sqlx::query("UPDATE Podcasts SET FeedCutoffDays = ? WHERE PodcastID = ? AND UserID = ?") - .bind(feed_cutoff_days) - .bind(podcast_id) - .bind(user_id) - .execute(pool) - .await?; - - Ok(result.rows_affected() > 0) - } - } - } - - // Bulk episode operations for efficient batch processing - pub async fn bulk_mark_episodes_completed(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { - if episode_ids.is_empty() { - return Ok((0, 0)); - } - - let mut processed = 0; - let mut failed = 0; - - match self { - DatabasePool::Postgres(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - match self.mark_episode_completed(episode_id, user_id, is_youtube).await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } else { - // Batch update regular episodes - let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); - let ids_clause = episode_ids_str.join(","); - let query = format!( - r#"UPDATE "Episodes" SET completed = TRUE WHERE episodeid IN ({})"#, - ids_clause - ); + if let Some(row) = row { + let url: Option = row.try_get("GpodderUrl")?; + let login: Option = row.try_get("GpodderLoginName")?; + let token: Option = row.try_get("GpodderToken")?; - let result = sqlx::query(&query).execute(&mut *tx).await?; - processed = result.rows_affected() as i32; + ( + url.ok_or_else(|| AppError::internal("Nextcloud URL not configured"))?, + login.ok_or_else(|| AppError::internal("Nextcloud username not configured"))?, + token.ok_or_else(|| AppError::internal("Nextcloud token not configured"))? 
+ ) + } else { + return Err(AppError::not_found("User not found")); } - - tx.commit().await?; } - DatabasePool::MySQL(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - match self.mark_episode_completed(episode_id, user_id, is_youtube).await { - Ok(_) => processed += 1, - Err(_) => failed += 1, + }; + + // Decrypt token using existing decrypt_password method + let password = self.decrypt_password(&encrypted_token).await?; + + // Get last sync timestamp for incremental sync + let since_timestamp = if let Some(last_sync) = self.get_last_sync_timestamp(user_id).await? { + last_sync.timestamp() + } else { + 0 + }; + + // Build Nextcloud API endpoint URLs + let base_url = if gpodder_url.ends_with('/') { + gpodder_url.trim_end_matches('/').to_string() + } else { + gpodder_url.clone() + }; + + let subscriptions_url = format!("{}/index.php/apps/gpoddersync/subscriptions", base_url); + let episode_action_url = format!("{}/index.php/apps/gpoddersync/episode_action", base_url); + + let client = reqwest::Client::new(); + let mut has_changes = false; + + // Sync subscriptions from Nextcloud + let subscriptions_response = client + .get(&subscriptions_url) + .basic_auth(&username, Some(&password)) + .query(&[("since", since_timestamp.to_string())]) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to fetch Nextcloud subscriptions: {}", e)))?; + + if subscriptions_response.status().is_success() { + let subscription_data: serde_json::Value = subscriptions_response.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse subscription response: {}", e)))?; + + // Process subscription changes + if let Some(add_list) = subscription_data.get("add").and_then(|v| v.as_array()) { + for url in add_list { + if let Some(podcast_url) = url.as_str() { + tracing::info!("Adding Nextcloud subscription: {}", podcast_url); + if let Err(e) = self.add_podcast_from_url(user_id, podcast_url, None).await { + 
tracing::error!("Failed to add podcast {}: {}", podcast_url, e); + } else { + has_changes = true; } } - } else { - // Batch update regular episodes - let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); - let ids_clause = episode_ids_str.join(","); - - let query = format!( - "UPDATE Episodes SET Completed = TRUE WHERE EpisodeID IN ({})", - ids_clause - ); - - let result = sqlx::query(&query).execute(&mut *tx).await?; - processed = result.rows_affected() as i32; } - - tx.commit().await?; } - } - - Ok((processed, failed)) - } - - pub async fn bulk_save_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { - if episode_ids.is_empty() { - return Ok((0, 0)); - } - - let mut processed = 0; - let mut failed = 0; - - match self { - DatabasePool::Postgres(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - // Check if already saved to avoid duplicates - let existing = sqlx::query( - r#"SELECT "SaveID" FROM "SavedVideos" WHERE "VideoID" = $1 AND "UserID" = $2"# - ) - .bind(episode_id) - .bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - r#"INSERT INTO "SavedVideos" ("VideoID", "UserID") VALUES ($1, $2)"# - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } - } else { - for episode_id in episode_ids { - // Check if already saved to avoid duplicates - let existing = sqlx::query( - r#"SELECT saveid FROM "SavedEpisodes" WHERE episodeid = $1 AND userid = $2"# - ) - .bind(episode_id) - .bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - r#"INSERT INTO "SavedEpisodes" (episodeid, userid) VALUES ($1, $2)"# - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } + + if let Some(remove_list) = 
subscription_data.get("remove").and_then(|v| v.as_array()) { + for url in remove_list { + if let Some(podcast_url) = url.as_str() { + tracing::info!("Removing Nextcloud subscription: {}", podcast_url); + if let Err(e) = self.remove_podcast_by_url(user_id, podcast_url).await { + tracing::error!("Failed to remove podcast {}: {}", podcast_url, e); + } else { + has_changes = true; } } } - - tx.commit().await?; } - DatabasePool::MySQL(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - let existing = sqlx::query( - "SELECT SaveID FROM SavedVideos WHERE VideoID = ? AND UserID = ?" - ) - .bind(episode_id) - .bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - "INSERT INTO SavedVideos (VideoID, UserID) VALUES (?, ?)" - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } - } else { - for episode_id in episode_ids { - let existing = sqlx::query( - "SELECT SaveID FROM SavedEpisodes WHERE EpisodeID = ? AND UserID = ?" 
- ) - .bind(episode_id) - .bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - "INSERT INTO SavedEpisodes (EpisodeID, UserID) VALUES (?, ?)" - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } + } + + // Sync episode actions from Nextcloud + let episode_actions_response = client + .get(&episode_action_url) + .basic_auth(&username, Some(&password)) + .query(&[("since", since_timestamp.to_string())]) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to fetch Nextcloud episode actions: {}", e)))?; + + if episode_actions_response.status().is_success() { + let episode_actions_data: serde_json::Value = episode_actions_response.json().await + .map_err(|e| AppError::internal(&format!("Failed to parse episode actions response: {}", e)))?; + + if let Some(actions) = episode_actions_data.get("actions").and_then(|v| v.as_array()) { + for action in actions { + if let Err(e) = self.process_nextcloud_episode_action(user_id, action).await { + tracing::error!("Failed to process episode action: {}", e); + } else { + has_changes = true; } } - - tx.commit().await?; } } - - Ok((processed, failed)) - } - - pub async fn bulk_queue_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { - if episode_ids.is_empty() { - return Ok((0, 0)); + + // Update last sync timestamp + if let Err(e) = self.update_last_sync_timestamp(user_id).await { + tracing::error!("Failed to update sync timestamp for user {}: {}", user_id, e); } - - let mut processed = 0; - let mut failed = 0; - - match self { - DatabasePool::Postgres(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - // Check if already queued to avoid duplicates - let existing = sqlx::query( - r#"SELECT "QueueID" FROM "QueuedVideos" WHERE "VideoID" = $1 AND "UserID" = $2"# - ) - .bind(episode_id) - 
.bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - r#"INSERT INTO "QueuedVideos" ("VideoID", "UserID") VALUES ($1, $2)"# - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } - } else { - // Get max queue position for user - let max_pos_row = sqlx::query( - r#"SELECT COALESCE(MAX(queueposition), 0) as max_pos FROM "EpisodeQueue" WHERE userid = $1"# - ) + + tracing::info!("Nextcloud sync completed for user {} - changes: {}", user_id, has_changes); + Ok(has_changes) + } + + // Process individual episode action from Nextcloud + async fn process_nextcloud_episode_action(&self, user_id: i32, action: &serde_json::Value) -> AppResult<()> { + let episode_url = action.get("episode") + .and_then(|v| v.as_str()) + .ok_or_else(|| AppError::internal("Missing episode URL in episode action"))?; + + let action_type = action.get("action") + .and_then(|v| v.as_str()) + .ok_or_else(|| AppError::internal("Missing action type in episode action"))?; + + // Find the episode by URL + let episode_id = match self.get_episode_id_by_url(episode_url).await { + Ok(Some(id)) => id, + Ok(None) => { + tracing::warn!("Episode not found for URL: {}", episode_url); + return Ok(()); + } + Err(_) => { + tracing::warn!("Error finding episode for URL: {}", episode_url); + return Ok(()); + } + }; + + match action_type { + "play" => { + if let Some(position) = action.get("position").and_then(|v| v.as_i64()) { + self.save_episode_history(user_id, episode_id, position as i32, 0).await?; + } + } + "download" => { + self.mark_episode_completed(episode_id, user_id, false).await?; + } + "delete" => { + // Remove episode from user's history + self.remove_episode_from_history(user_id, episode_id).await?; + } + _ => { + tracing::debug!("Unknown action type: {}", action_type); + } + } + + Ok(()) + } + + // Add podcast from URL - used by Nextcloud sync + pub async fn 
add_podcast_from_url(&self, user_id: i32, feed_url: &str, _feed_cutoff: Option) -> AppResult<()> { + // Check if podcast already exists for this user + if self.podcast_exists_for_user(user_id, feed_url).await? { + tracing::info!("Podcast {} already exists for user {}", feed_url, user_id); + return Ok(()); + } + + // Get podcast metadata from feed URL using existing function + let podcast_values = self.get_podcast_values(feed_url, user_id, None, None).await?; + + // Add podcast using existing function + let _result = self.add_podcast_from_values(&podcast_values, user_id, 30, None, None).await?; + + tracing::info!("Successfully added podcast {} for user {}", feed_url, user_id); + Ok(()) + } + + // Remove podcast by URL - used by Nextcloud sync + pub async fn remove_podcast_by_url(&self, user_id: i32, feed_url: &str) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"DELETE FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) + .bind(feed_url) .bind(user_id) - .fetch_one(&mut *tx) + .execute(pool) .await?; - let mut max_pos: i32 = max_pos_row.try_get("max_pos")?; - - for episode_id in episode_ids { - // Check if already queued to avoid duplicates - let existing = sqlx::query( - r#"SELECT queueid FROM "EpisodeQueue" WHERE episodeid = $1 AND userid = $2 AND is_youtube = $3"# - ) - .bind(episode_id) - .bind(user_id) - .bind(is_youtube) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - max_pos += 1; - match sqlx::query( - r#"INSERT INTO "EpisodeQueue" (episodeid, userid, queueposition, is_youtube) VALUES ($1, $2, $3, $4)"# - ) - .bind(episode_id) - .bind(user_id) - .bind(max_pos) - .bind(is_youtube) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } + if result.rows_affected() > 0 { + tracing::info!("Successfully removed podcast {} for user {}", feed_url, user_id); + } else { + tracing::info!("Podcast {} not found for user {}", feed_url, user_id); } - - 
tx.commit().await?; } DatabasePool::MySQL(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - for episode_id in episode_ids { - let existing = sqlx::query( - "SELECT QueueID FROM QueuedVideos WHERE VideoID = ? AND UserID = ?" - ) - .bind(episode_id) - .bind(user_id) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - match sqlx::query( - "INSERT INTO QueuedVideos (VideoID, UserID) VALUES (?, ?)" - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } - } else { - // Get max queue position for user - let max_pos_row = sqlx::query( - "SELECT COALESCE(MAX(QueuePosition), 0) as max_pos FROM EpisodeQueue WHERE UserID = ?" - ) + let result = sqlx::query("DELETE FROM Podcasts WHERE FeedURL = ? AND UserID = ?") + .bind(feed_url) .bind(user_id) - .fetch_one(&mut *tx) + .execute(pool) .await?; - let mut max_pos: i32 = max_pos_row.try_get("max_pos")?; - - for episode_id in episode_ids { - let existing = sqlx::query( - "SELECT QueueID FROM EpisodeQueue WHERE EpisodeID = ? AND UserID = ? AND is_youtube = ?" 
- ) - .bind(episode_id) - .bind(user_id) - .bind(is_youtube) - .fetch_optional(&mut *tx) - .await?; - - if existing.is_none() { - max_pos += 1; - match sqlx::query( - "INSERT INTO EpisodeQueue (EpisodeID, UserID, QueuePosition, is_youtube) VALUES (?, ?, ?, ?)" - ) - .bind(episode_id) - .bind(user_id) - .bind(max_pos) - .bind(is_youtube) - .execute(&mut *tx) - .await { - Ok(_) => processed += 1, - Err(_) => failed += 1, - } - } - } + if result.rows_affected() > 0 { + tracing::info!("Successfully removed podcast {} for user {}", feed_url, user_id); + } else { + tracing::info!("Podcast {} not found for user {}", feed_url, user_id); } - - tx.commit().await?; } } - - Ok((processed, failed)) + Ok(()) } - // Bulk delete downloaded episodes - efficient batch processing for mass deletion - pub async fn bulk_delete_downloaded_episodes(&self, episode_ids: Vec, user_id: i32, is_youtube: bool) -> AppResult<(i32, i32)> { - if episode_ids.is_empty() { - return Ok((0, 0)); - } - - let mut processed = 0; - let mut failed = 0; - + // Get episode ID by URL - used by Nextcloud episode actions + pub async fn get_episode_id_by_url(&self, episode_url: &str) -> AppResult> { match self { DatabasePool::Postgres(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - // Delete YouTube videos from DownloadedEpisodes (they use the same table but different logic) - for episode_id in episode_ids { - match sqlx::query( - r#"DELETE FROM "DownloadedEpisodes" WHERE episodeid = $1 AND userid = $2"# - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(result) => { - if result.rows_affected() > 0 { - processed += 1; - } else { - failed += 1; // Episode wasn't downloaded by this user - } - }, - Err(_) => failed += 1, - } - } - } else { - // Batch delete regular episodes using IN clause for efficiency - let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); - let ids_clause = episode_ids_str.join(","); - - let query = format!( - r#"DELETE 
FROM "DownloadedEpisodes" WHERE episodeid IN ({}) AND userid = $1"#, - ids_clause - ); + let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE episodeurl = $1 LIMIT 1"#) + .bind(episode_url) + .fetch_optional(pool) + .await?; - let result = sqlx::query(&query) - .bind(user_id) - .execute(&mut *tx) - .await?; - processed = result.rows_affected() as i32; - failed = episode_ids.len() as i32 - processed; // Assume failures are episodes not found + if let Some(row) = row { + Ok(Some(row.try_get("episodeid")?)) + } else { + Ok(None) } - - tx.commit().await?; } DatabasePool::MySQL(pool) => { - let mut tx = pool.begin().await?; - - if is_youtube { - // Delete YouTube videos from DownloadedEpisodes - for episode_id in episode_ids { - match sqlx::query( - "DELETE FROM DownloadedEpisodes WHERE EpisodeID = ? AND UserID = ?" - ) - .bind(episode_id) - .bind(user_id) - .execute(&mut *tx) - .await { - Ok(result) => { - if result.rows_affected() > 0 { - processed += 1; - } else { - failed += 1; // Episode wasn't downloaded by this user - } - }, - Err(_) => failed += 1, - } - } - } else { - // Batch delete regular episodes using IN clause for efficiency - let episode_ids_str: Vec = episode_ids.iter().map(|id| id.to_string()).collect(); - let ids_clause = episode_ids_str.join(","); - - let query = format!( - "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN ({}) AND UserID = ?", - ids_clause - ); + let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE EpisodeURL = ? 
LIMIT 1") + .bind(episode_url) + .fetch_optional(pool) + .await?; - let result = sqlx::query(&query) - .bind(user_id) - .execute(&mut *tx) - .await?; - processed = result.rows_affected() as i32; - failed = episode_ids.len() as i32 - processed; // Assume failures are episodes not found + if let Some(row) = row { + Ok(Some(row.try_get("EpisodeID")?)) + } else { + Ok(None) } - - tx.commit().await?; } } + } + + // Save episode history - used by Nextcloud episode actions + pub async fn save_episode_history(&self, user_id: i32, episode_id: i32, position: i32, _total_time: i32) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#" + INSERT INTO "UserEpisodeHistory" (userid, episodeid, listenduration, episodecompleted, episodeprogress) + VALUES ($1, $2, $3, FALSE, $4) + ON CONFLICT (userid, episodeid) + DO UPDATE SET listenduration = $3, episodeprogress = $4 + "#) + .bind(user_id) + .bind(episode_id) + .bind(position) + .bind(position) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query(r#" + INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDuration, EpisodeCompleted, EpisodeProgress) + VALUES (?, ?, ?, FALSE, ?) + ON DUPLICATE KEY UPDATE ListenDuration = ?, EpisodeProgress = ? + "#) + .bind(user_id) + .bind(episode_id) + .bind(position) + .bind(position) + .bind(position) + .bind(position) + .execute(pool) + .await?; + } + } + Ok(()) + } - Ok((processed, failed)) + // Remove episode from history - used by Nextcloud episode actions + pub async fn remove_episode_from_history(&self, user_id: i32, episode_id: i32) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"DELETE FROM "UserEpisodeHistory" WHERE userid = $1 AND episodeid = $2"#) + .bind(user_id) + .bind(episode_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query("DELETE FROM UserEpisodeHistory WHERE UserID = ? 
AND EpisodeID = ?") + .bind(user_id) + .bind(episode_id) + .execute(pool) + .await?; + } + } + Ok(()) } - // Set up internal gpodder sync - matches Python set_gpodder_internal_sync function exactly - pub async fn set_gpodder_internal_sync(&self, user_id: i32) -> AppResult { + // Remove GPodder sync settings for a user - matches Python remove_gpodder_settings function exactly + pub async fn remove_gpodder_settings(&self, user_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - // Get the username and current sync type - let user_row = sqlx::query(r#"SELECT username, pod_sync_type FROM "Users" WHERE userid = $1"#) + let mut tx = pool.begin().await?; + + // First delete any device records + sqlx::query(r#"DELETE FROM "GpodderDevices" WHERE userid = $1"#) .bind(user_id) - .fetch_optional(pool) + .execute(&mut *tx) .await?; - let (username, current_sync_type) = if let Some(row) = user_row { - let username: String = row.try_get("username")?; - let sync_type: Option = row.try_get("pod_sync_type")?; - (username, sync_type.unwrap_or_else(|| "None".to_string())) - } else { - return Err(AppError::not_found("User not found")); - }; - - // Generate new sync type based on current - let new_sync_type = match current_sync_type.as_str() { - "external" => "both", - "None" | "" => "gpodder", - _ => ¤t_sync_type, - }; - - // Generate a secure internal token (64 characters alphanumeric) - use rand::{distr::Alphanumeric, Rng}; - let internal_token: String = rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(64) - .map(char::from) - .collect(); - - let local_gpodder_url = "http://localhost:8042"; - - // Update user with internal gpodder settings + sqlx::query(r#"DELETE FROM "GpodderSyncState" WHERE userid = $1"#) + .bind(user_id) + .execute(&mut *tx) + .await?; + + // Then clear GPodder settings from user record sqlx::query(r#" - UPDATE "Users" - SET gpodderurl = $1, gpoddertoken = $2, gpodderloginname = $3, pod_sync_type = $4 - WHERE userid = $5 + UPDATE 
"Users" + SET gpodderurl = '', gpodderloginname = '', gpoddertoken = '', pod_sync_type = 'None' + WHERE userid = $1 "#) - .bind(local_gpodder_url) - .bind(&internal_token) - .bind(&username) - .bind(new_sync_type) - .bind(user_id) - .execute(pool) - .await?; - - // Create default device name - let default_device_name = format!("pinepods-internal-{}", user_id); + .bind(user_id) + .execute(&mut *tx) + .await?; - // Create device via gPodder API (matches Python version exactly) - match self.create_device_via_gpodder_api(local_gpodder_url, &username, &internal_token, &default_device_name).await { - Ok(device_id) => { - Ok(serde_json::json!({ - "device_name": default_device_name, - "device_id": device_id, - "success": true - })) - } - Err(e) => { - tracing::warn!("Failed to create device via API: {}, continuing anyway", e); - // Even if device creation fails, still return success (matches Python behavior) - Ok(serde_json::json!({ - "device_name": default_device_name, - "device_id": user_id, - "success": true - })) - } - } + tx.commit().await?; + Ok(true) } DatabasePool::MySQL(pool) => { - // Get the username and current sync type - let user_row = sqlx::query("SELECT Username, Pod_Sync_Type FROM Users WHERE UserID = ?") + let mut tx = pool.begin().await?; + + // First delete any device records + sqlx::query("DELETE FROM GpodderDevices WHERE UserID = ?") .bind(user_id) - .fetch_optional(pool) + .execute(&mut *tx) .await?; - let (username, current_sync_type) = if let Some(row) = user_row { - let username: String = row.try_get("Username")?; - let sync_type: Option = row.try_get("Pod_Sync_Type")?; - (username, sync_type.unwrap_or_else(|| "None".to_string())) - } else { - return Err(AppError::not_found("User not found")); - }; - - // Generate new sync type based on current - let new_sync_type = match current_sync_type.as_str() { - "external" => "both", - "None" | "" => "gpodder", - _ => ¤t_sync_type, - }; - - // Generate a secure internal token (64 characters alphanumeric) - 
use rand::{distr::Alphanumeric, Rng}; - let internal_token: String = rand::thread_rng() - .sample_iter(&Alphanumeric) - .take(64) - .map(char::from) - .collect(); - - let local_gpodder_url = "http://localhost:8042"; - - // Update user with internal gpodder settings - sqlx::query(" - UPDATE Users - SET GpodderUrl = ?, GpodderToken = ?, GpodderLoginName = ?, Pod_Sync_Type = ? + sqlx::query("DELETE FROM GpodderSyncState WHERE UserID = ?") + .bind(user_id) + .execute(&mut *tx) + .await?; + + // Then clear GPodder settings from user record + sqlx::query(r#" + UPDATE Users + SET GpodderUrl = '', GpodderLoginName = '', GpodderToken = '', Pod_Sync_Type = 'None' WHERE UserID = ? - ") - .bind(local_gpodder_url) - .bind(&internal_token) - .bind(&username) - .bind(new_sync_type) - .bind(user_id) - .execute(pool) - .await?; - - // Create default device name - let default_device_name = format!("pinepods-internal-{}", user_id); + "#) + .bind(user_id) + .execute(&mut *tx) + .await?; - // Create device via gPodder API (matches Python version exactly) - match self.create_device_via_gpodder_api(local_gpodder_url, &username, &internal_token, &default_device_name).await { - Ok(device_id) => { - Ok(serde_json::json!({ - "device_name": default_device_name, - "device_id": device_id, - "success": true - })) - } - Err(e) => { - tracing::warn!("Failed to create device via API: {}, continuing anyway", e); - // Even if device creation fails, still return success (matches Python behavior) - Ok(serde_json::json!({ - "device_name": default_device_name, - "device_id": user_id, - "success": true - })) - } - } + tx.commit().await?; + Ok(true) } } } - // Disable internal gpodder sync - matches Python disable_gpodder_internal_sync function exactly - pub async fn disable_gpodder_internal_sync(&self, user_id: i32) -> AppResult { - // Get current user gpodder status - let user_status = self.gpodder_get_status(user_id).await?; - let current_sync_type = &user_status.sync_type; + // Check if a user exists 
with the given username and email + pub async fn check_reset_user(&self, username: &str, email: &str) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"SELECT userid FROM "Users" WHERE username = $1 AND email = $2"#) + .bind(username) + .bind(email) + .fetch_optional(pool) + .await?; + Ok(result.is_some()) + } + DatabasePool::MySQL(pool) => { + let result = sqlx::query("SELECT UserID FROM Users WHERE Username = ? AND Email = ?") + .bind(username) + .bind(email) + .fetch_optional(pool) + .await?; + Ok(result.is_some()) + } + } + } - // Determine new sync type - let new_sync_type = match current_sync_type.as_str() { - "both" => "external", - "gpodder" => "None", - _ => current_sync_type, - }; + // Create a password reset code for the user + pub async fn reset_password_create_code(&self, user_email: &str) -> AppResult> { + use rand::Rng; + use chrono::{Utc, Duration}; + + // Generate 6-character reset code with uppercase letters and digits + let reset_code: String = (0..6) + .map(|_| { + let chars = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"; + chars[rand::rng().random_range(0..chars.len())] as char + }) + .collect(); + + let reset_expiry: sqlx::types::chrono::DateTime = (Utc::now() + Duration::hours(1)).into(); match self { DatabasePool::Postgres(pool) => { - // If internal API is being used, clear the settings - if user_status.gpodder_url.as_deref() == Some("http://localhost:8042") { - sqlx::query(r#" - UPDATE "Users" - SET gpodderurl = '', gpoddertoken = '', gpodderloginname = '', pod_sync_type = $1 - WHERE userid = $2 - "#) - .bind(new_sync_type) - .bind(user_id) - .execute(pool) + // Check if user exists first + let user_exists = sqlx::query(r#"SELECT userid FROM "Users" WHERE email = $1"#) + .bind(user_email) + .fetch_optional(pool) .await?; - } else { - // Just update the sync type - sqlx::query(r#"UPDATE "Users" SET pod_sync_type = $1 WHERE userid = $2"#) - .bind(new_sync_type) - .bind(user_id) - .execute(pool) - 
.await?; + + if user_exists.is_none() { + return Ok(None); } - } - DatabasePool::MySQL(pool) => { - // If internal API is being used, clear the settings - if user_status.gpodder_url.as_deref() == Some("http://localhost:8042") { - sqlx::query(" - UPDATE Users - SET GpodderUrl = '', GpodderToken = '', GpodderLoginName = '', Pod_Sync_Type = ? - WHERE UserID = ? - ") - .bind(new_sync_type) - .bind(user_id) + + // Update reset code and expiry + let result = sqlx::query(r#" + UPDATE "Users" + SET reset_code = $1, reset_expiry = $2 + WHERE email = $3 + "#) + .bind(&reset_code) + .bind(reset_expiry) + .bind(user_email) .execute(pool) .await?; + + if result.rows_affected() > 0 { + Ok(Some(reset_code)) } else { - // Just update the sync type - sqlx::query("UPDATE Users SET Pod_Sync_Type = ? WHERE UserID = ?") - .bind(new_sync_type) - .bind(user_id) - .execute(pool) - .await?; + Ok(None) } } - } + DatabasePool::MySQL(pool) => { + // Check if user exists first + let user_exists = sqlx::query("SELECT UserID FROM Users WHERE Email = ?") + .bind(user_email) + .fetch_optional(pool) + .await?; + + if user_exists.is_none() { + return Ok(None); + } - Ok(true) - } + // Update reset code and expiry + let result = sqlx::query(r#" + UPDATE Users + SET Reset_Code = ?, Reset_Expiry = ? + WHERE Email = ? 
+ "#) + .bind(&reset_code) + .bind(reset_expiry) + .bind(user_email) + .execute(pool) + .await?; - // Helper function to create device via gPodder API - matches Python create device logic exactly - async fn create_device_via_gpodder_api(&self, gpodder_url: &str, username: &str, token: &str, device_name: &str) -> AppResult { - use reqwest; - use serde_json; - - let client = reqwest::Client::new(); - - // First, check if device already exists - let device_list_url = format!("{}/api/2/devices/{}.json", gpodder_url.trim_end_matches('/'), username); - - let auth = reqwest::header::HeaderValue::from_str(&format!("Basic {}", - base64::encode(format!("{}:{}", username, token)) - )).map_err(|e| AppError::internal(&format!("Failed to create auth header: {}", e)))?; - - match client.get(&device_list_url) - .header(reqwest::header::AUTHORIZATION, auth.clone()) - .send() - .await - { - Ok(response) if response.status().is_success() => { - if let Ok(devices) = response.json::>().await { - for device in devices { - if device.get("id").and_then(|v| v.as_str()) == Some(device_name) { - tracing::info!("Found existing device with ID: {}", device_name); - return Ok(device_name.to_string()); - } - } + if result.rows_affected() > 0 { + Ok(Some(reset_code)) + } else { + Ok(None) } } - Ok(response) => { - tracing::warn!("Failed to fetch device list: {}", response.status()); - } - Err(e) => { - tracing::warn!("Error fetching device list: {}", e); - } - } - - // Device doesn't exist, create it - let device_url = format!("{}/api/2/devices/{}/{}.json", gpodder_url.trim_end_matches('/'), username, device_name); - let device_data = serde_json::json!({ - "caption": format!("PinePods Internal Device {}", device_name.split('-').last().unwrap_or("unknown")), - "type": "server" - }); - - match client.post(&device_url) - .header(reqwest::header::AUTHORIZATION, auth) - .json(&device_data) - .send() - .await - { - Ok(response) if response.status().is_success() => { - tracing::info!("Created device with 
ID: {}", device_name); - Ok(device_name.to_string()) - } - Ok(response) => { - let status = response.status(); - let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string()); - Err(AppError::internal(&format!("Failed to create device: {} - {}", status, error_text))) - } - Err(e) => { - Err(AppError::internal(&format!("Error creating device via API: {}", e))) - } } } - // Background task for GPodder subscription refresh - matches Python refresh_gpodder_subscription_for_background - pub async fn refresh_gpodder_subscription_background(&self, user_id: i32) -> AppResult { - // Get user sync settings - let settings_opt = self.get_user_sync_settings(user_id).await?; - let settings = match settings_opt { - Some(s) => s, - None => return Ok(false), // No sync configured - }; - - // Get default device - let device_name = match self.get_default_gpodder_device_name(user_id).await? { - Some(name) => name, - None => format!("pinepods-internal-{}", user_id), // Fallback device name - }; - - // Call the appropriate sync method based on sync type - match settings.sync_type.as_str() { - "gpodder" => { - // Internal GPodder API - self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await - } - "external" => { - // External GPodder server - decrypt token using existing encryption system - let decrypted_token = match self.decrypt_gpodder_token(&settings.token).await { - Ok(token) => token, - Err(_) => settings.token.clone(), // Fallback to original token if decryption fails - }; - self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await - } - "both" => { - // Both internal and external - let internal_result = self.call_gpodder_service_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name, false).await?; - let decrypted_token = match self.decrypt_gpodder_token(&settings.token).await { - 
Ok(token) => token, - Err(_) => settings.token.clone(), - }; - let external_result = self.call_gpodder_service_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name, false).await?; - Ok(internal_result || external_result) + // Remove reset code from user (used when email sending fails) + pub async fn reset_password_remove_code(&self, email: &str) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET reset_code = NULL, reset_expiry = NULL WHERE email = $1"#) + .bind(email) + .execute(pool) + .await?; } - "nextcloud" => { - // Nextcloud sync - use existing nextcloud refresh functionality - self.refresh_nextcloud_subscription_background(user_id).await + DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET Reset_Code = NULL, Reset_Expiry = NULL WHERE Email = ?") + .bind(email) + .execute(pool) + .await?; } - _ => Ok(false), // No sync or unsupported type } + Ok(()) } - // Helper to get default device name - async fn get_default_gpodder_device_name(&self, user_id: i32) -> AppResult> { + // Verify reset code is valid and not expired + pub async fn verify_reset_code(&self, user_email: &str, reset_code: &str) -> AppResult> { + use chrono::Utc; + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT devicename FROM "GpodderDevices" WHERE userid = $1 AND isdefault = true LIMIT 1"#) - .bind(user_id) + let result = sqlx::query(r#"SELECT reset_code, reset_expiry FROM "Users" WHERE email = $1"#) + .bind(user_email) .fetch_optional(pool) .await?; - Ok(row.and_then(|r| r.try_get("devicename").ok())) + if let Some(row) = result { + let stored_code: Option = row.try_get("reset_code").ok(); + let expiry: Option = row.try_get("reset_expiry").ok(); + + if let (Some(stored_code), Some(expiry)) = (stored_code.clone(), expiry.clone()) { + // Convert NaiveDateTime to UTC for comparison + let expiry_utc = expiry.and_utc(); + let is_valid = stored_code == reset_code && 
Utc::now() < expiry_utc; + Ok(Some(is_valid)) + } else { + Ok(Some(false)) // No reset code set + } + } else { + Ok(None) // User not found + } } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT DeviceName FROM GpodderDevices WHERE UserID = ? AND IsDefault = 1 LIMIT 1") - .bind(user_id) + let result = sqlx::query("SELECT Reset_Code, Reset_Expiry FROM Users WHERE Email = ?") + .bind(user_email) .fetch_optional(pool) .await?; - Ok(row.and_then(|r| r.try_get("DeviceName").ok())) + if let Some(row) = result { + let stored_code: Option = row.try_get("Reset_Code").ok(); + let expiry: Option> = row.try_get("Reset_Expiry").ok(); + + if let (Some(stored_code), Some(expiry)) = (stored_code, expiry) { + Ok(Some(stored_code == reset_code && Utc::now() < expiry)) + } else { + Ok(Some(false)) // No reset code set + } + } else { + Ok(None) // User not found + } } } } - // Decrypt GPodder token using existing encryption system - matches Python token decryption - // Get comprehensive GPodder server statistics by calling actual GPodder API endpoints - pub async fn get_gpodder_server_statistics(&self, user_id: i32) -> AppResult { - use crate::handlers::sync::{GpodderStatistics, ServerDevice, ServerSubscription, ServerEpisodeAction, EndpointTest}; - use std::time::Instant; - - // Get user's sync settings using the same method as sync operations - let sync_settings = self.get_user_sync_settings(user_id).await?; - let settings = match sync_settings { - Some(s) => s, - None => { - return Ok(GpodderStatistics { - server_url: "No sync configured".to_string(), - sync_type: "None".to_string(), - sync_enabled: false, - server_devices: vec![], - total_devices: 0, - server_subscriptions: vec![], - total_subscriptions: 0, - recent_episode_actions: vec![], - total_episode_actions: 0, - connection_status: "Not configured".to_string(), - last_sync_timestamp: None, - api_endpoints_tested: vec![], - }); - } - }; - - // Use the same authentication logic as sync operations - let 
(gpodder_url, username, password) = match settings.sync_type.as_str() { - "gpodder" => { - // Internal gPodder API - use token directly (no decryption needed) - ("http://localhost:8042".to_string(), settings.username.clone(), settings.token.clone()) - } - "external" => { - // External gPodder server - decrypt token first - let decrypted_token = self.decrypt_password(&settings.token).await?; - (settings.url.clone(), settings.username.clone(), decrypted_token) - } - "nextcloud" => { - // Nextcloud sync - decrypt token first - let decrypted_token = self.decrypt_password(&settings.token).await?; - (settings.url.clone(), settings.username.clone(), decrypted_token) - } - _ => { - return Ok(GpodderStatistics { - server_url: settings.url.clone(), - sync_type: settings.sync_type.clone(), - sync_enabled: false, - server_devices: vec![], - total_devices: 0, - server_subscriptions: vec![], - total_subscriptions: 0, - recent_episode_actions: vec![], - total_episode_actions: 0, - connection_status: "Unsupported sync type".to_string(), - last_sync_timestamp: None, - api_endpoints_tested: vec![], - }); - } - }; - - // Create GPodder session using the same method as sync operations - let session = self.create_gpodder_session(&UserSyncSettings { - url: gpodder_url.clone(), - username: username.clone(), - token: password.clone(), - sync_type: settings.sync_type.clone(), - }).await?; - - let mut api_endpoints_tested = Vec::new(); - let mut server_devices = Vec::new(); - let mut server_subscriptions = Vec::new(); - let mut recent_episode_actions = Vec::new(); - - // Test 1: Get devices from GPodder API - let devices_url = format!("{}/api/2/devices/{}.json", gpodder_url.trim_end_matches('/'), username); - let start = Instant::now(); - - let devices_response = if session.authenticated { - session.client.get(&devices_url).send().await - } else { - session.client.get(&devices_url).basic_auth(&username, Some(&password)).send().await - }; - - match devices_response - { - Ok(resp) if 
resp.status().is_success() => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/devices/{username}.json".to_string(), - status: "success".to_string(), - response_time_ms: Some(duration), - error: None, - }); - - match resp.json::().await { - Ok(devices_data) => { - if let Some(devices_array) = devices_data.as_array() { - for device in devices_array { - server_devices.push(ServerDevice { - id: device["id"].as_str().unwrap_or("unknown").to_string(), - caption: device["caption"].as_str().unwrap_or("").to_string(), - device_type: device["type"].as_str().unwrap_or("unknown").to_string(), - subscriptions: device["subscriptions"].as_i64().unwrap_or(0) as i32, - }); - } - } - } - Err(e) => { - tracing::warn!("Failed to parse devices response: {}", e); - } + // Reset password and clear reset code/expiry + pub async fn reset_password_prompt(&self, user_email: &str, hashed_password: &str) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + UPDATE "Users" + SET hashed_pw = $1, reset_code = NULL, reset_expiry = NULL + WHERE email = $2 + "#) + .bind(hashed_password) + .bind(user_email) + .execute(pool) + .await?; + + if result.rows_affected() > 0 { + Ok(Some("Password Reset Successfully".to_string())) + } else { + Ok(None) } } - Ok(resp) => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/devices/{username}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(format!("HTTP {}", resp.status())), - }); - } - Err(e) => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/devices/{username}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(e.to_string()), - }); + DatabasePool::MySQL(pool) => { + let result = sqlx::query(r#" + 
UPDATE Users + SET Hashed_PW = ?, Reset_Code = NULL, Reset_Expiry = NULL + WHERE Email = ? + "#) + .bind(hashed_password) + .bind(user_email) + .execute(pool) + .await?; + + if result.rows_affected() > 0 { + Ok(Some("Password Reset Successfully".to_string())) + } else { + Ok(None) + } } } + } - // Test 2: Get subscriptions from GPodder API - let default_device = server_devices.first().map(|d| d.id.clone()).unwrap_or_else(|| "default".to_string()); - let subscriptions_url = format!("{}/api/2/subscriptions/{}/{}.json", - gpodder_url.trim_end_matches('/'), username, default_device); - let start = Instant::now(); - - let subscriptions_response = if session.authenticated { - session.client.get(&subscriptions_url).send().await - } else { - session.client.get(&subscriptions_url).basic_auth(&username, Some(&password)).send().await - }; + // Set scheduled backup configuration + pub async fn set_scheduled_backup(&self, user_id: i32, cron_schedule: &str, enabled: bool) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query(r#" + INSERT INTO "ScheduledBackups" (userid, cron_schedule, enabled, created_at, updated_at) + VALUES ($1, $2, $3, NOW(), NOW()) + ON CONFLICT (userid) + DO UPDATE SET + cron_schedule = EXCLUDED.cron_schedule, + enabled = EXCLUDED.enabled, + updated_at = NOW() + "#) + .bind(user_id) + .bind(cron_schedule) + .bind(enabled) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query(r#" + INSERT INTO ScheduledBackups (UserID, CronSchedule, Enabled, CreatedAt, UpdatedAt) + VALUES (?, ?, ?, NOW(), NOW()) + ON DUPLICATE KEY UPDATE + CronSchedule = VALUES(CronSchedule), + Enabled = VALUES(Enabled), + UpdatedAt = NOW() + "#) + .bind(user_id) + .bind(cron_schedule) + .bind(enabled) + .execute(pool) + .await?; + } + } + Ok(()) + } - match subscriptions_response - { - Ok(resp) if resp.status().is_success() => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: 
"GET /api/2/subscriptions/{username}/{device}.json".to_string(), - status: "success".to_string(), - response_time_ms: Some(duration), - error: None, - }); + // Get scheduled backup configuration + pub async fn get_scheduled_backup(&self, user_id: i32) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + SELECT cron_schedule, enabled, created_at, updated_at + FROM "ScheduledBackups" + WHERE userid = $1 + "#) + .bind(user_id) + .fetch_optional(pool) + .await?; - match resp.json::().await { - Ok(subs_data) => { - if let Some(subs_array) = subs_data.as_array() { - for sub in subs_array { - if let Some(url) = sub.as_str() { - server_subscriptions.push(ServerSubscription { - url: url.to_string(), - title: None, - description: None, - }); - } - } - } - } - Err(e) => { - tracing::warn!("Failed to parse subscriptions response: {}", e); - } + if let Some(row) = row { + Ok(serde_json::json!({ + "schedule": row.get::("cron_schedule"), + "enabled": row.get::("enabled"), + "created_at": row.get::("created_at").format("%Y-%m-%dT%H:%M:%S").to_string(), + "updated_at": row.get::("updated_at").format("%Y-%m-%dT%H:%M:%S").to_string() + })) + } else { + Ok(serde_json::json!({ + "schedule": null, + "enabled": false, + "created_at": null, + "updated_at": null + })) } } - Ok(resp) => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/subscriptions/{username}/{device}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(format!("HTTP {}", resp.status())), - }); - } - Err(e) => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/subscriptions/{username}/{device}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(e.to_string()), - }); + DatabasePool::MySQL(pool) => { + let row = sqlx::query(r#" + SELECT 
CronSchedule, Enabled, CreatedAt, UpdatedAt + FROM ScheduledBackups + WHERE UserID = ? + "#) + .bind(user_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let created_datetime = row.try_get::, _>("CreatedAt")?; + let updated_datetime = row.try_get::, _>("UpdatedAt")?; + + Ok(serde_json::json!({ + "schedule": row.try_get::("CronSchedule")?, + "enabled": row.try_get::("Enabled")?, + "created_at": created_datetime.format("%Y-%m-%dT%H:%M:%S").to_string(), + "updated_at": updated_datetime.format("%Y-%m-%dT%H:%M:%S").to_string() + })) + } else { + Ok(serde_json::json!({ + "schedule": null, + "enabled": false, + "created_at": null, + "updated_at": null + })) + } } } + } - // Test 3: Get episode actions from GPodder API - let episodes_url = format!("{}/api/2/episodes/{}.json", gpodder_url.trim_end_matches('/'), username); - let start = Instant::now(); + // Execute backup to file (called by scheduler) + pub async fn execute_scheduled_backup(&self, _user_id: i32) -> AppResult { + use tokio::process::Command; + use chrono::Utc; - let episodes_response = if session.authenticated { - session.client.get(&episodes_url).send().await - } else { - session.client.get(&episodes_url).basic_auth(&username, Some(&password)).send().await - }; + // Generate backup filename with timestamp + let timestamp = Utc::now().format("%Y%m%d_%H%M%S"); + let backup_filename = format!("scheduled_backup_{}.sql", timestamp); + let backup_path = format!("/opt/pinepods/backups/{}", backup_filename); - match episodes_response - { - Ok(resp) if resp.status().is_success() => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/episodes/{username}.json".to_string(), - status: "success".to_string(), - response_time_ms: Some(duration), - error: None, - }); + // Get database password from environment + let db_password = std::env::var("DB_PASSWORD") + .map_err(|_| AppError::internal("Database password not found in 
environment"))?; - match resp.json::().await { - Ok(episodes_data) => { - if let Some(actions) = episodes_data["actions"].as_array() { - for action in actions.iter().take(10) { // Show last 10 actions - recent_episode_actions.push(ServerEpisodeAction { - podcast: action["podcast"].as_str().unwrap_or("").to_string(), - episode: action["episode"].as_str().unwrap_or("").to_string(), - action: action["action"].as_str().unwrap_or("").to_string(), - timestamp: action["timestamp"].as_str().unwrap_or("").to_string(), - position: action["position"].as_i64().map(|p| p as i32), - device: action["device"].as_str().map(|s| s.to_string()), - }); - } - } - } - Err(e) => { - tracing::warn!("Failed to parse episode actions response: {}", e); - } + match self { + DatabasePool::Postgres(_) => { + let db_host = std::env::var("DB_HOST").unwrap_or_else(|_| "localhost".to_string()); + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "5432".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "postgres".to_string()); + let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "pinepods_database".to_string()); + + let mut cmd = Command::new("pg_dump"); + cmd.arg("-h").arg(&db_host) + .arg("-p").arg(&db_port) + .arg("-U").arg(&db_user) + .arg("-d").arg(&db_name) + .arg("-f").arg(&backup_path) + .arg("--verbose") + .env("PGPASSWORD", &db_password); + + let output = cmd.output().await + .map_err(|e| AppError::internal(&format!("Failed to execute backup: {}", e)))?; + + if !output.status.success() { + let error_msg = String::from_utf8_lossy(&output.stderr); + return Err(AppError::internal(&format!("Backup failed: {}", error_msg))); } } - Ok(resp) => { - let duration = start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/episodes/{username}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(format!("HTTP {}", resp.status())), - }); - } - Err(e) => { - let duration = 
start.elapsed().as_millis() as i64; - api_endpoints_tested.push(EndpointTest { - endpoint: "GET /api/2/episodes/{username}.json".to_string(), - status: "failed".to_string(), - response_time_ms: Some(duration), - error: Some(e.to_string()), - }); + DatabasePool::MySQL(_) => { + let db_host = std::env::var("DB_HOST").unwrap_or_else(|_| "localhost".to_string()); + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "3306".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "mysql".to_string()); + let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "pinepods_database".to_string()); + + let mut cmd = Command::new("mysqldump"); + cmd.arg("-h").arg(&db_host) + .arg("-P").arg(&db_port) + .arg("-u").arg(&db_user) + .arg(format!("-p{}", &db_password)) + .arg(&db_name) + .arg("--result-file").arg(&backup_path) + .arg("--single-transaction") + .arg("--routines") + .arg("--triggers"); + + let output = cmd.output().await + .map_err(|e| AppError::internal(&format!("Failed to execute backup: {}", e)))?; + + if !output.status.success() { + let error_msg = String::from_utf8_lossy(&output.stderr); + return Err(AppError::internal(&format!("Backup failed: {}", error_msg))); + } } } - // Get sync status - let status = self.gpodder_get_status(user_id).await?; - let last_sync = self.get_last_sync_timestamp(user_id).await?; - - // Determine overall connection status - let connection_status = if api_endpoints_tested.iter().any(|t| t.status == "success") { - if api_endpoints_tested.iter().all(|t| t.status == "success") { - "All endpoints working" - } else { - "Partial connectivity" + Ok(backup_filename) + } + + // Get podcasts that have podcast_index_id = 0 (imported without podcast index match) + pub async fn get_unmatched_podcasts(&self, user_id: i32) -> AppResult> { + match self { + DatabasePool::Postgres(pool) => { + let rows = sqlx::query( + r#"SELECT podcastid, podcastname, artworkurl, author, description, feedurl + FROM "Podcasts" + WHERE userid = $1 
AND (podcastindexid = 0 OR podcastindexid IS NULL) + AND (ignorepodcastindex = FALSE OR ignorepodcastindex IS NULL) + ORDER BY podcastname"# + ) + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut podcasts = Vec::new(); + for row in rows { + let podcast = serde_json::json!({ + "podcast_id": row.try_get::("podcastid")?, + "podcast_name": row.try_get::("podcastname")?, + "artwork_url": row.try_get::, _>("artworkurl")?, + "author": row.try_get::, _>("author")?, + "description": row.try_get::, _>("description")?, + "feed_url": row.try_get::("feedurl")? + }); + podcasts.push(podcast); + } + + Ok(podcasts) } - } else { - "Connection failed" - }; + DatabasePool::MySQL(pool) => { + let rows = sqlx::query( + "SELECT PodcastID, PodcastName, ArtworkURL, Author, Description, FeedURL + FROM Podcasts + WHERE UserID = ? AND (PodcastIndexID = 0 OR PodcastIndexID IS NULL) + AND (IgnorePodcastIndex = 0 OR IgnorePodcastIndex IS NULL) + ORDER BY PodcastName" + ) + .bind(user_id) + .fetch_all(pool) + .await?; - Ok(GpodderStatistics { - server_url: gpodder_url, - sync_type: status.sync_type.clone(), - sync_enabled: status.sync_type != "None", - server_devices: server_devices.clone(), - total_devices: server_devices.len() as i32, - server_subscriptions: server_subscriptions.clone(), - total_subscriptions: server_subscriptions.len() as i32, - recent_episode_actions: recent_episode_actions.clone(), - total_episode_actions: recent_episode_actions.len() as i32, - connection_status: connection_status.to_string(), - last_sync_timestamp: last_sync.map(|dt| dt.format("%Y-%m-%d %H:%M:%S UTC").to_string()), - api_endpoints_tested, - }) + let mut podcasts = Vec::new(); + for row in rows { + let podcast = serde_json::json!({ + "podcast_id": row.try_get::("PodcastID")?, + "podcast_name": row.try_get::("PodcastName")?, + "artwork_url": row.try_get::, _>("ArtworkURL")?, + "author": row.try_get::, _>("Author")?, + "description": row.try_get::, _>("Description")?, + "feed_url": 
row.try_get::("FeedURL")? + }); + podcasts.push(podcast); + } + + Ok(podcasts) + } + } + } + + // Update a podcast's podcast_index_id + pub async fn update_podcast_index_id(&self, user_id: i32, podcast_id: i32, podcast_index_id: i32) -> AppResult<()> { + match self { + DatabasePool::Postgres(pool) => { + sqlx::query( + r#"UPDATE "Podcasts" + SET podcastindexid = $1 + WHERE podcastid = $2 AND userid = $3"# + ) + .bind(podcast_index_id) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; + } + DatabasePool::MySQL(pool) => { + sqlx::query( + "UPDATE Podcasts + SET PodcastIndexID = ? + WHERE PodcastID = ? AND UserID = ?" + ) + .bind(podcast_index_id) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; + } + } + Ok(()) } - async fn decrypt_gpodder_token(&self, encrypted_token: &str) -> AppResult { - // Get encryption key from app settings - let encryption_key = match self { + // Ignore/unignore a podcast's index ID requirement + pub async fn ignore_podcast_index_id(&self, user_id: i32, podcast_id: i32, ignore: bool) -> AppResult<()> { + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT encryptionkey FROM "AppSettings" WHERE appsettingsid = 1"#) - .fetch_optional(pool) - .await?; - - row.and_then(|r| r.try_get("encryptionkey").ok()) + sqlx::query( + r#"UPDATE "Podcasts" + SET ignorepodcastindex = $1 + WHERE podcastid = $2 AND userid = $3"# + ) + .bind(ignore) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1") - .fetch_optional(pool) - .await?; - - row.and_then(|r| r.try_get("EncryptionKey").ok()) + sqlx::query( + "UPDATE Podcasts + SET IgnorePodcastIndex = ? + WHERE PodcastID = ? AND UserID = ?" 
+ ) + .bind(ignore) + .bind(podcast_id) + .bind(user_id) + .execute(pool) + .await?; } - }; - - let encryption_key: String = encryption_key - .ok_or_else(|| AppError::internal("Encryption key not found"))?; - - // Decrypt using Fernet (matches Python implementation) - use fernet::Fernet; - let fernet = match Fernet::new(&encryption_key) { - Some(f) => f, - None => return Err(AppError::internal("Failed to create Fernet cipher with provided key")), - }; - - let decrypted = fernet.decrypt(encrypted_token) - .map_err(|e| AppError::internal(&format!("Failed to decrypt token: {}", e)))?; - - String::from_utf8(decrypted) - .map_err(|e| AppError::internal(&format!("Failed to parse decrypted token: {}", e))) + } + Ok(()) } - // Nextcloud subscription refresh for background tasks - matches Python nextcloud refresh - async fn refresh_nextcloud_subscription_background(&self, user_id: i32) -> AppResult { - // Get user nextcloud settings - let settings = match self { + // Get ignored podcasts for a user + pub async fn get_ignored_podcasts(&self, user_id: i32) -> AppResult> { + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT gpodderurl, gpoddertoken, gpodderloginname FROM "Users" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(r) = row { - ( - r.try_get::, _>("gpodderurl")?.unwrap_or_default(), - r.try_get::, _>("gpoddertoken")?.unwrap_or_default(), - r.try_get::, _>("gpodderloginname")?.unwrap_or_default(), - ) - } else { - return Ok(false); + let rows = sqlx::query( + r#"SELECT podcastid, podcastname, artworkurl, author, description, feedurl + FROM "Podcasts" + WHERE userid = $1 AND ignorepodcastindex = TRUE + ORDER BY podcastname"# + ) + .bind(user_id) + .fetch_all(pool) + .await?; + + let mut podcasts = Vec::new(); + for row in rows { + let podcast = serde_json::json!({ + "podcast_id": row.try_get::("podcastid")?, + "podcast_name": row.try_get::("podcastname")?, + "artwork_url": row.try_get::, 
_>("artworkurl")?, + "author": row.try_get::, _>("author")?, + "description": row.try_get::, _>("description")?, + "feed_url": row.try_get::("feedurl")? + }); + podcasts.push(podcast); } + + Ok(podcasts) } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(r) = row { - ( - r.try_get::, _>("GpodderUrl")?.unwrap_or_default(), - r.try_get::, _>("GpodderToken")?.unwrap_or_default(), - r.try_get::, _>("GpodderLoginName")?.unwrap_or_default(), - ) - } else { - return Ok(false); - } - } - }; + let rows = sqlx::query( + "SELECT PodcastID, PodcastName, ArtworkURL, Author, Description, FeedURL + FROM Podcasts + WHERE UserID = ? AND IgnorePodcastIndex = 1 + ORDER BY PodcastName" + ) + .bind(user_id) + .fetch_all(pool) + .await?; - let (gpodder_url, gpodder_token, gpodder_login) = settings; + let mut podcasts = Vec::new(); + for row in rows { + let podcast = serde_json::json!({ + "podcast_id": row.try_get::("PodcastID")?, + "podcast_name": row.try_get::("PodcastName")?, + "artwork_url": row.try_get::, _>("ArtworkURL")?, + "author": row.try_get::, _>("Author")?, + "description": row.try_get::, _>("Description")?, + "feed_url": row.try_get::("FeedURL")? 
+ }); + podcasts.push(podcast); + } - if gpodder_url.is_empty() || gpodder_login.is_empty() { - return Ok(false); + Ok(podcasts) + } } - - // Call existing nextcloud sync functionality - self.sync_with_nextcloud_for_user(user_id).await } - // Get last sync timestamp for incremental sync - PROPER GPodder spec implementation - async fn get_last_sync_timestamp(&self, user_id: i32) -> AppResult>> { + // Add shared episode - uses current database schema with ShareCode + pub async fn add_shared_episode(&self, episode_id: i32, shared_by: i32, share_code: &str, expiration_date: chrono::DateTime) -> AppResult { match self { - DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT lastsynctime FROM "Users" WHERE userid = $1"#) - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - Ok(row.try_get("lastsynctime").unwrap_or(None)) - } else { - Ok(None) + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#" + INSERT INTO "SharedEpisodes" (episodeid, sharedby, sharecode, expirationdate) + VALUES ($1, $2, $3, $4) + "#) + .bind(episode_id) + .bind(shared_by) + .bind(share_code) + .bind(expiration_date) + .execute(pool) + .await; + + match result { + Ok(_) => Ok(true), + Err(e) => { + tracing::error!("Error sharing episode: {}", e); + Ok(false) + } } } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT LastSyncTime FROM Users WHERE UserID = ?") - .bind(user_id) - .fetch_optional(pool) - .await?; - - if let Some(row) = row { - Ok(row.try_get("LastSyncTime").unwrap_or(None)) - } else { - Ok(None) + DatabasePool::MySQL(pool) => { + let result = sqlx::query( + "INSERT INTO SharedEpisodes (EpisodeID, SharedBy, ShareCode, ExpirationDate) VALUES (?, ?, ?, ?)" + ) + .bind(episode_id) + .bind(shared_by) + .bind(share_code) + .bind(expiration_date) + .execute(pool) + .await; + + match result { + Ok(_) => Ok(true), + Err(e) => { + tracing::error!("Error sharing episode: {}", e); + Ok(false) + } } } } } - - // Update last sync 
timestamp - PROPER GPodder spec implementation for incremental sync - async fn update_last_sync_timestamp(&self, user_id: i32) -> AppResult<()> { - let now = chrono::Utc::now(); - + + // Get episode ID by share code (for shared episode access) + pub async fn get_episode_id_by_share_code(&self, share_code: &str) -> AppResult> { match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#"UPDATE "Users" SET lastsynctime = $1 WHERE userid = $2"#) - .bind(now) - .bind(user_id) - .execute(pool) - .await?; + DatabasePool::Postgres(pool) => { + let result = sqlx::query_as::<_, (i32,)>(r#" + SELECT episodeid FROM "SharedEpisodes" + WHERE sharecode = $1 AND expirationdate > NOW() + "#) + .bind(share_code) + .fetch_optional(pool) + .await?; + + Ok(result.map(|row| row.0)) } - DatabasePool::MySQL(pool) => { - sqlx::query("UPDATE Users SET LastSyncTime = ? WHERE UserID = ?") - .bind(now) - .bind(user_id) - .execute(pool) - .await?; + DatabasePool::MySQL(pool) => { + let result = sqlx::query_as::<_, (i32,)>( + "SELECT EpisodeID FROM SharedEpisodes + WHERE ShareCode = ? 
AND ExpirationDate > NOW()" + ) + .bind(share_code) + .fetch_optional(pool) + .await?; + + Ok(result.map(|row| row.0)) } } - - Ok(()) } - // Get user episode actions since timestamp - CRITICAL for incremental sync performance - async fn get_user_episode_actions_since(&self, user_id: i32, since: chrono::DateTime) -> AppResult> { + // Get shared episode metadata - bypasses user restrictions for public access + pub async fn get_shared_episode_metadata(&self, episode_id: i32) -> AppResult { match self { DatabasePool::Postgres(pool) => { - let rows = sqlx::query(r#" - SELECT - e.episodeurl as podcast, - e.episodeurl as episode, - eh.listenduration as position, - CASE - WHEN eh.listenduration > 0 THEN 'play' - WHEN d.episodeid IS NOT NULL THEN 'download' - ELSE 'new' - END as action, - COALESCE(eh.listendate, '1970-01-01'::timestamp) as timestamp - FROM "Episodes" e - LEFT JOIN "UserEpisodeHistory" eh ON e.episodeid = eh.episodeid AND eh.userid = $1 - LEFT JOIN "DownloadedEpisodes" d ON e.episodeid = d.episodeid AND d.userid = $1 - WHERE (eh.userid = $1 OR d.userid = $1) - AND COALESCE(eh.listendate, '1970-01-01'::timestamp) > $2 - ORDER BY timestamp DESC - "#) - .bind(user_id) - .bind(since) - .fetch_all(pool) + // First try regular episodes + let row = sqlx::query( + r#"SELECT + "Podcasts".podcastid, + "Podcasts".feedurl, + "Podcasts".podcastname, + "Podcasts".artworkurl, + "Episodes".episodetitle, + "Episodes".episodepubdate, + "Episodes".episodedescription, + "Episodes".episodeartwork, + "Episodes".episodeurl, + "Episodes".episodeduration, + "Episodes".episodeid, + "Podcasts".websiteurl, + "Episodes".completed, + FALSE::boolean as is_youtube + FROM "Episodes" + INNER JOIN "Podcasts" ON "Episodes".podcastid = "Podcasts".podcastid + WHERE "Episodes".episodeid = $1"# + ) + .bind(episode_id) + .fetch_optional(pool) .await?; - - let mut actions = Vec::new(); - for row in rows { - actions.push(serde_json::json!({ - "podcast": row.try_get::("podcast")?, - "episode": 
row.try_get::("episode")?, - "action": row.try_get::("action")?, - "timestamp": row.try_get::, _>("timestamp")?.to_rfc3339(), - "position": row.try_get::, _>("position").unwrap_or(None) - })); + + if let Some(row) = row { + let episodepubdate = row.try_get::("episodepubdate")? + .format("%Y-%m-%dT%H:%M:%S").to_string(); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("podcastid")?, + "feedurl": row.try_get::, _>("feedurl")?, + "podcastname": row.try_get::("podcastname")?, + "artworkurl": row.try_get::, _>("artworkurl")?, + "episodetitle": row.try_get::("episodetitle")?, + "episodepubdate": episodepubdate, + "episodedescription": row.try_get::, _>("episodedescription")?, + "episodeartwork": row.try_get::, _>("episodeartwork")?, + "episodeurl": row.try_get::("episodeurl")?, + "episodeduration": row.try_get::, _>("episodeduration")?, + "episodeid": row.try_get::("episodeid")?, + "websiteurl": row.try_get::, _>("websiteurl")?, + "listenduration": None::, // No user-specific data for shared episodes + "completed": row.try_get::, _>("completed")?, + "is_youtube": false + })) + } else { + // Try YouTube videos + let row = sqlx::query( + r#"SELECT + "Podcasts".podcastid, + "Podcasts".feedurl, + "Podcasts".podcastname, + "Podcasts".artworkurl, + "YouTubeVideos".videotitle as episodetitle, + "YouTubeVideos".publishedat as episodepubdate, + "YouTubeVideos".videodescription as episodedescription, + "YouTubeVideos".thumbnailurl as episodeartwork, + "YouTubeVideos".videourl as episodeurl, + "YouTubeVideos".duration as episodeduration, + "YouTubeVideos".videoid as episodeid, + "Podcasts".websiteurl, + "YouTubeVideos".completed, + TRUE::boolean as is_youtube + FROM "YouTubeVideos" + INNER JOIN "Podcasts" ON "YouTubeVideos".podcastid = "Podcasts".podcastid + WHERE "YouTubeVideos".videoid = $1"# + ) + .bind(episode_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let episodepubdate = row.try_get::("episodepubdate")? 
+ .format("%Y-%m-%dT%H:%M:%S").to_string(); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("podcastid")?, + "feedurl": row.try_get::, _>("feedurl")?, + "podcastname": row.try_get::("podcastname")?, + "artworkurl": row.try_get::, _>("artworkurl")?, + "episodetitle": row.try_get::("episodetitle")?, + "episodepubdate": episodepubdate, + "episodedescription": row.try_get::, _>("episodedescription")?, + "episodeartwork": row.try_get::, _>("episodeartwork")?, + "episodeurl": row.try_get::("episodeurl")?, + "episodeduration": row.try_get::, _>("episodeduration")?, + "episodeid": row.try_get::("episodeid")?, + "websiteurl": row.try_get::, _>("websiteurl")?, + "listenposition": None::, // No user-specific data for shared episodes + "completed": row.try_get::, _>("completed")?, + "is_youtube": true + })) + } else { + Err(AppError::not_found("Episode not found")) + } } - Ok(actions) } DatabasePool::MySQL(pool) => { - let rows = sqlx::query(" - SELECT - e.EpisodeURL as podcast, - e.EpisodeURL as episode, - eh.ListenDuration as position, - CASE - WHEN eh.ListenDuration > 0 THEN 'play' - WHEN d.EpisodeID IS NOT NULL THEN 'download' - ELSE 'new' - END as action, - COALESCE(eh.ListenDate, '1970-01-01 00:00:00') as timestamp - FROM Episodes e - LEFT JOIN UserEpisodeHistory eh ON e.EpisodeID = eh.EpisodeID AND eh.UserID = ? - LEFT JOIN DownloadedEpisodes d ON e.EpisodeID = d.EpisodeID AND d.UserID = ? - WHERE (eh.UserID = ? OR d.UserID = ?) - AND COALESCE(eh.ListenDate, '1970-01-01 00:00:00') > ? 
- ORDER BY timestamp DESC - ") - .bind(user_id) - .bind(user_id) - .bind(user_id) - .bind(user_id) - .bind(since) - .fetch_all(pool) + // First try regular episodes + let row = sqlx::query( + "SELECT + Podcasts.PodcastID, + Podcasts.FeedURL, + Podcasts.PodcastName, + Podcasts.ArtworkURL, + Episodes.EpisodeTitle, + Episodes.EpisodePubDate, + Episodes.EpisodeDescription, + Episodes.EpisodeArtwork, + Episodes.EpisodeURL, + Episodes.EpisodeDuration, + Episodes.EpisodeID, + Podcasts.WebsiteURL, + Episodes.Completed, + FALSE as is_youtube + FROM Episodes + INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID + WHERE Episodes.EpisodeID = ?" + ) + .bind(episode_id) + .fetch_optional(pool) .await?; - - let mut actions = Vec::new(); - for row in rows { - actions.push(serde_json::json!({ - "podcast": row.try_get::("podcast")?, - "episode": row.try_get::("episode")?, - "action": row.try_get::("action")?, - "timestamp": row.try_get::, _>("timestamp")?.to_rfc3339(), - "position": row.try_get::, _>("position").unwrap_or(None) - })); + + if let Some(row) = row { + let episodepubdate = row.try_get::("EpisodePubDate")? 
+ .format("%Y-%m-%dT%H:%M:%S").to_string(); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("PodcastID")?, + "feedurl": row.try_get::, _>("FeedURL")?, + "podcastname": row.try_get::("PodcastName")?, + "artworkurl": row.try_get::, _>("ArtworkURL")?, + "episodetitle": row.try_get::("EpisodeTitle")?, + "episodepubdate": episodepubdate, + "episodedescription": row.try_get::, _>("EpisodeDescription")?, + "episodeartwork": row.try_get::, _>("EpisodeArtwork")?, + "episodeurl": row.try_get::("EpisodeURL")?, + "episodeduration": row.try_get::, _>("EpisodeDuration")?, + "episodeid": row.try_get::("EpisodeID")?, + "websiteurl": row.try_get::, _>("WebsiteURL")?, + "listenduration": None::, // No user-specific data for shared episodes + "completed": row.try_get::, _>("Completed")?, + "is_youtube": false + })) + } else { + // Try YouTube videos + let row = sqlx::query( + "SELECT + Podcasts.PodcastID, + Podcasts.FeedURL, + Podcasts.PodcastName, + Podcasts.ArtworkURL, + YouTubeVideos.VideoTitle as EpisodeTitle, + YouTubeVideos.PublishedAt as EpisodePubDate, + YouTubeVideos.VideoDescription as EpisodeDescription, + YouTubeVideos.ThumbnailURL as EpisodeArtwork, + YouTubeVideos.VideoURL as EpisodeURL, + YouTubeVideos.Duration as EpisodeDuration, + YouTubeVideos.VideoID as EpisodeID, + Podcasts.WebsiteURL, + YouTubeVideos.Completed, + TRUE as is_youtube + FROM YouTubeVideos + INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID + WHERE YouTubeVideos.VideoID = ?" + ) + .bind(episode_id) + .fetch_optional(pool) + .await?; + + if let Some(row) = row { + let episodepubdate = row.try_get::("EpisodePubDate")? 
+ .format("%Y-%m-%dT%H:%M:%S").to_string(); + + Ok(serde_json::json!({ + "podcastid": row.try_get::("PodcastID")?, + "feedurl": row.try_get::, _>("FeedURL")?, + "podcastname": row.try_get::("PodcastName")?, + "artworkurl": row.try_get::, _>("ArtworkURL")?, + "episodetitle": row.try_get::("EpisodeTitle")?, + "episodepubdate": episodepubdate, + "episodedescription": row.try_get::, _>("EpisodeDescription")?, + "episodeartwork": row.try_get::, _>("EpisodeArtwork")?, + "episodeurl": row.try_get::("EpisodeURL")?, + "episodeduration": row.try_get::, _>("EpisodeDuration")?, + "episodeid": row.try_get::("EpisodeID")?, + "websiteurl": row.try_get::, _>("WebsiteURL")?, + "listenposition": None::, // No user-specific data for shared episodes + "completed": row.try_get::, _>("Completed")?, + "is_youtube": true + })) + } else { + Err(AppError::not_found("Episode not found")) + } } - Ok(actions) } } } - // Get all user episode actions - fallback for first sync - async fn get_user_episode_actions(&self, user_id: i32) -> AppResult> { - // Use since timestamp of epoch (1970) to get all actions - let epoch = chrono::DateTime::from_timestamp(0, 0).unwrap_or_else(chrono::Utc::now); - self.get_user_episode_actions_since(user_id, epoch).await - } -} - -#[derive(Debug)] -struct RssEpisode { - title: String, - description: String, - url: String, - pub_date: String, - duration: Option, - author: Option, - artwork_url: Option, -} - -use std::collections::HashSet; - -impl DatabasePool { - // Get all users with nextcloud sync enabled - matches Python get_all_users_with_nextcloud_sync - pub async fn get_all_users_with_nextcloud_sync(&self) -> AppResult> { + pub async fn delete_playlist(&self, user_id: i32, playlist_id: i32) -> AppResult<()> { match self { DatabasePool::Postgres(pool) => { - let user_ids: Vec = sqlx::query_scalar( - r#"SELECT userid FROM "Users" WHERE pod_sync_type = 'nextcloud'"# + // Check if playlist exists and belongs to user + let playlist = sqlx::query( + r#"SELECT 
issystemplaylist, userid FROM "Playlists" WHERE playlistid = $1"# ) - .fetch_all(pool) + .bind(playlist_id) + .fetch_optional(pool) .await?; - Ok(user_ids) + + let playlist = playlist.ok_or_else(|| AppError::not_found("Playlist not found"))?; + + let is_system: bool = playlist.try_get("issystemplaylist")?; + let owner_id: i32 = playlist.try_get("userid")?; + + if is_system { + return Err(AppError::bad_request("Cannot delete system playlists")); + } + + if owner_id != user_id { + return Err(AppError::forbidden("Unauthorized to delete this playlist")); + } + + // Delete the playlist + sqlx::query(r#"DELETE FROM "Playlists" WHERE playlistid = $1"#) + .bind(playlist_id) + .execute(pool) + .await?; + + Ok(()) } DatabasePool::MySQL(pool) => { - let user_ids: Vec = sqlx::query_scalar( - "SELECT UserID FROM Users WHERE Pod_Sync_Type = 'nextcloud'" - ) - .fetch_all(pool) - .await?; - Ok(user_ids) + // Check if playlist exists and belongs to user + let playlist = sqlx::query("SELECT IsSystemPlaylist, UserID FROM Playlists WHERE PlaylistID = ?") + .bind(playlist_id) + .fetch_optional(pool) + .await?; + + let playlist = playlist.ok_or_else(|| AppError::not_found("Playlist not found"))?; + + let is_system: i8 = playlist.try_get("IsSystemPlaylist")?; + let owner_id: i32 = playlist.try_get("UserID")?; + + if is_system != 0 { + return Err(AppError::bad_request("Cannot delete system playlists")); + } + + if owner_id != user_id { + return Err(AppError::forbidden("Unauthorized to delete this playlist")); + } + + // Delete the playlist + sqlx::query("DELETE FROM Playlists WHERE PlaylistID = ?") + .bind(playlist_id) + .execute(pool) + .await?; + + Ok(()) } } } - // Complete implementation of sync_with_nextcloud_for_user - matches Python nextcloud sync functionality - pub async fn sync_with_nextcloud_for_user(&self, user_id: i32) -> AppResult { - tracing::info!("Starting Nextcloud sync for user {}", user_id); - - // Get user's Nextcloud configuration - let gpodder_status = 
self.gpodder_get_status(user_id).await?; - - // Only proceed if sync type is nextcloud - if gpodder_status.sync_type != "nextcloud" { - tracing::info!("User {} does not have Nextcloud sync enabled", user_id); - return Ok(false); - } - - // Get Nextcloud credentials from database - let (gpodder_url, username, encrypted_token) = match self { + // Get user's language preference + pub async fn get_user_language(&self, user_id: i32) -> AppResult { + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT gpodderurl, gpodderloginname, gpoddertoken FROM "Users" WHERE userid = $1"#) + let row = sqlx::query(r#"SELECT language FROM "Users" WHERE userid = $1"#) .bind(user_id) .fetch_optional(pool) .await?; - + if let Some(row) = row { - let url: Option = row.try_get("gpodderurl")?; - let login: Option = row.try_get("gpodderloginname")?; - let token: Option = row.try_get("gpoddertoken")?; - - ( - url.ok_or_else(|| AppError::internal("Nextcloud URL not configured"))?, - login.ok_or_else(|| AppError::internal("Nextcloud username not configured"))?, - token.ok_or_else(|| AppError::internal("Nextcloud token not configured"))? - ) + Ok(row.get::, _>("language").unwrap_or_else(|| "en".to_string())) } else { - return Err(AppError::not_found("User not found")); + Ok("en".to_string()) } } DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM Users WHERE UserID = ?") + let row = sqlx::query("SELECT Language FROM Users WHERE UserID = ?") .bind(user_id) .fetch_optional(pool) .await?; - + if let Some(row) = row { - let url: Option = row.try_get("GpodderUrl")?; - let login: Option = row.try_get("GpodderLoginName")?; - let token: Option = row.try_get("GpodderToken")?; - - ( - url.ok_or_else(|| AppError::internal("Nextcloud URL not configured"))?, - login.ok_or_else(|| AppError::internal("Nextcloud username not configured"))?, - token.ok_or_else(|| AppError::internal("Nextcloud token not configured"))? 
- ) + Ok(row.get::, _>("Language").unwrap_or_else(|| "en".to_string())) } else { - return Err(AppError::not_found("User not found")); + Ok("en".to_string()) } } - }; - - // Decrypt token using existing decrypt_password method - let password = self.decrypt_password(&encrypted_token).await?; - - // Get last sync timestamp for incremental sync - let since_timestamp = if let Some(last_sync) = self.get_last_sync_timestamp(user_id).await? { - last_sync.timestamp() - } else { - 0 - }; - - // Build Nextcloud API endpoint URLs - let base_url = if gpodder_url.ends_with('/') { - gpodder_url.trim_end_matches('/').to_string() - } else { - gpodder_url.clone() - }; - - let subscriptions_url = format!("{}/index.php/apps/gpoddersync/subscriptions", base_url); - let episode_action_url = format!("{}/index.php/apps/gpoddersync/episode_action", base_url); - - let client = reqwest::Client::new(); - let mut has_changes = false; - - // Sync subscriptions from Nextcloud - let subscriptions_response = client - .get(&subscriptions_url) - .basic_auth(&username, Some(&password)) - .query(&[("since", since_timestamp.to_string())]) - .send() - .await - .map_err(|e| AppError::internal(&format!("Failed to fetch Nextcloud subscriptions: {}", e)))?; - - if subscriptions_response.status().is_success() { - let subscription_data: serde_json::Value = subscriptions_response.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse subscription response: {}", e)))?; - - // Process subscription changes - if let Some(add_list) = subscription_data.get("add").and_then(|v| v.as_array()) { - for url in add_list { - if let Some(podcast_url) = url.as_str() { - tracing::info!("Adding Nextcloud subscription: {}", podcast_url); - if let Err(e) = self.add_podcast_from_url(user_id, podcast_url, None).await { - tracing::error!("Failed to add podcast {}: {}", podcast_url, e); - } else { - has_changes = true; - } - } - } + } + } + + // Update user's language preference + pub async fn 
update_user_language(&self, user_id: i32, language: &str) -> AppResult { + match self { + DatabasePool::Postgres(pool) => { + let result = sqlx::query(r#"UPDATE "Users" SET language = $1 WHERE userid = $2"#) + .bind(language) + .bind(user_id) + .execute(pool) + .await?; + + Ok(result.rows_affected() > 0) } - - if let Some(remove_list) = subscription_data.get("remove").and_then(|v| v.as_array()) { - for url in remove_list { - if let Some(podcast_url) = url.as_str() { - tracing::info!("Removing Nextcloud subscription: {}", podcast_url); - if let Err(e) = self.remove_podcast_by_url(user_id, podcast_url).await { - tracing::error!("Failed to remove podcast {}: {}", podcast_url, e); - } else { - has_changes = true; + DatabasePool::MySQL(pool) => { + let result = sqlx::query("UPDATE Users SET Language = ? WHERE UserID = ?") + .bind(language) + .bind(user_id) + .execute(pool) + .await?; + + Ok(result.rows_affected() > 0) + } + } + } + + // Create missing default playlists for existing users + pub async fn create_missing_default_playlists(&self) -> AppResult<()> { + use tracing::{info, warn, error}; + + info!("🎵 Checking for missing default playlists for existing users..."); + + // Define default playlists (same as migration 032) + let default_playlists = vec![ + ("Quick Listens", "Short episodes under 15 minutes, perfect for quick breaks", Some(1), Some(900), "duration_asc", false, true, true, None, false, Some(1000), "ph-fast-forward"), + ("Longform", "Extended episodes over 1 hour, ideal for long drives or deep dives", Some(3600), None, "duration_desc", true, true, true, None, false, Some(1000), "ph-car"), + ("Currently Listening", "Episodes you've started but haven't finished", None, None, "date_desc", false, true, false, None, false, None, "ph-play"), + ("Fresh Releases", "Latest episodes from the last 24 hours", None, None, "date_desc", true, false, false, Some(24), false, None, "ph-sparkle"), + ("Weekend Marathon", "Longer episodes (30+ minutes) perfect for weekend 
listening", Some(1800), None, "duration_desc", true, true, true, None, true, Some(1000), "ph-couch"), + ("Commuter Mix", "Perfect-length episodes (15-45 minutes) for your daily commute", Some(900), Some(2700), "date_desc", true, true, true, None, false, Some(1000), "ph-car-simple"), + ]; + + match self { + DatabasePool::Postgres(pool) => { + // Get all existing users (excluding background user if present) + let user_rows = sqlx::query(r#"SELECT userid FROM "Users" WHERE userid > 1"#) + .fetch_all(pool) + .await?; + + info!("Found {} users to check for missing default playlists", user_rows.len()); + + for user_row in user_rows { + let user_id: i32 = user_row.try_get("userid")?; + + for (name, description, min_duration, max_duration, sort_order, include_unplayed, include_partially_played, include_played, time_filter_hours, group_by_podcast, max_episodes, icon_name) in &default_playlists { + // Check if this playlist already exists for this user + let exists: bool = sqlx::query_scalar( + r#"SELECT EXISTS(SELECT 1 FROM "Playlists" WHERE userid = $1 AND name = $2)"# + ).bind(user_id).bind(name).fetch_one(pool).await?; + + if !exists { + // Create the playlist for this user + match sqlx::query(r#" + INSERT INTO "Playlists" ( + userid, name, description, issystemplaylist, minduration, maxduration, sortorder, + includeunplayed, includepartiallyplayed, includeplayed, timefilterhours, + groupbypodcast, maxepisodes, playprogressmin, playprogressmax, podcastids, + iconname, episodecount + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) + "#) + .bind(user_id).bind(name).bind(description).bind(false) + .bind(min_duration).bind(max_duration).bind(sort_order) + .bind(include_unplayed).bind(include_partially_played).bind(include_played) + .bind(time_filter_hours).bind(group_by_podcast).bind(max_episodes) + .bind(0.0).bind(100.0).bind(&[] as &[i32]).bind(icon_name).bind(0) + .execute(pool).await { + Ok(_) => info!("Created playlist '{}' for 
user {}", name, user_id), + Err(e) => warn!("Failed to create playlist '{}' for user {}: {}", name, user_id, e), + } } } } } - } - - // Sync episode actions from Nextcloud - let episode_actions_response = client - .get(&episode_action_url) - .basic_auth(&username, Some(&password)) - .query(&[("since", since_timestamp.to_string())]) - .send() - .await - .map_err(|e| AppError::internal(&format!("Failed to fetch Nextcloud episode actions: {}", e)))?; - - if episode_actions_response.status().is_success() { - let episode_actions_data: serde_json::Value = episode_actions_response.json().await - .map_err(|e| AppError::internal(&format!("Failed to parse episode actions response: {}", e)))?; - - if let Some(actions) = episode_actions_data.get("actions").and_then(|v| v.as_array()) { - for action in actions { - if let Err(e) = self.process_nextcloud_episode_action(user_id, action).await { - tracing::error!("Failed to process episode action: {}", e); - } else { - has_changes = true; + DatabasePool::MySQL(pool) => { + // Get all existing users (excluding background user if present) + let user_rows = sqlx::query("SELECT UserID FROM Users WHERE UserID > 1") + .fetch_all(pool) + .await?; + + info!("Found {} users to check for missing default playlists", user_rows.len()); + + for user_row in user_rows { + let user_id: i32 = user_row.try_get("UserID")?; + + for (name, description, min_duration, max_duration, sort_order, include_unplayed, include_partially_played, include_played, time_filter_hours, group_by_podcast, max_episodes, icon_name) in &default_playlists { + // Check if this playlist already exists for this user + let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM Playlists WHERE UserID = ? 
AND Name = ?") + .bind(user_id) + .bind(name) + .fetch_one(pool) + .await?; + + if count == 0 { + // Create the playlist for this user + match sqlx::query(" + INSERT INTO Playlists ( + UserID, Name, Description, IsSystemPlaylist, MinDuration, MaxDuration, SortOrder, + IncludeUnplayed, IncludePartiallyPlayed, IncludePlayed, TimeFilterHours, + GroupByPodcast, MaxEpisodes, PlayProgressMin, PlayProgressMax, PodcastIDs, + IconName, EpisodeCount + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ") + .bind(user_id).bind(name).bind(description).bind(false) + .bind(min_duration).bind(max_duration).bind(sort_order) + .bind(include_unplayed).bind(include_partially_played).bind(include_played) + .bind(time_filter_hours).bind(group_by_podcast).bind(max_episodes) + .bind(0.0).bind(100.0).bind("[]").bind(icon_name).bind(0) + .execute(pool).await { + Ok(_) => info!("Created playlist '{}' for user {}", name, user_id), + Err(e) => warn!("Failed to create playlist '{}' for user {}: {}", name, user_id, e), + } + } } } } } - // Update last sync timestamp - if let Err(e) = self.update_last_sync_timestamp(user_id).await { - tracing::error!("Failed to update sync timestamp for user {}: {}", user_id, e); - } - - tracing::info!("Nextcloud sync completed for user {} - changes: {}", user_id, has_changes); - Ok(has_changes) + info!("✅ Finished checking for missing default playlists"); + Ok(()) } - - // Process individual episode action from Nextcloud - async fn process_nextcloud_episode_action(&self, user_id: i32, action: &serde_json::Value) -> AppResult<()> { - let podcast_url = action.get("podcast") - .and_then(|v| v.as_str()) - .ok_or_else(|| AppError::internal("Missing podcast URL in episode action"))?; - - let episode_url = action.get("episode") - .and_then(|v| v.as_str()) - .ok_or_else(|| AppError::internal("Missing episode URL in episode action"))?; - - let action_type = action.get("action") - .and_then(|v| v.as_str()) - .ok_or_else(|| AppError::internal("Missing 
action type in episode action"))?; + + // Create default playlists for a single user - shared by user creation and startup check + async fn create_default_playlists_for_user(&self, user_id: i32) -> AppResult<()> { + use tracing::{info, warn}; - // Find the episode by URL - let episode_id = match self.get_episode_id_by_url(episode_url).await { - Ok(Some(id)) => id, - Ok(None) => { - tracing::warn!("Episode not found for URL: {}", episode_url); - return Ok(()); - } - Err(_) => { - tracing::warn!("Error finding episode for URL: {}", episode_url); - return Ok(()); - } - }; + // Define default playlists (same as migration 032) + let default_playlists = vec![ + ("Quick Listens", "Short episodes under 15 minutes, perfect for quick breaks", Some(1), Some(900), "duration_asc", true, true, true, None, false, Some(1000), "ph-fast-forward"), + ("Longform", "Extended episodes over 1 hour, ideal for long drives or deep dives", Some(3600), None, "duration_desc", true, true, true, None, false, Some(1000), "ph-car"), + ("Currently Listening", "Episodes you've started but haven't finished", None, None, "date_desc", false, true, false, None, false, None, "ph-play"), + ("Fresh Releases", "Latest episodes from the last 24 hours", None, None, "date_desc", true, false, false, Some(24), false, None, "ph-sparkle"), + ("Weekend Marathon", "Longer episodes (30+ minutes) perfect for weekend listening", Some(1800), None, "duration_desc", true, true, true, None, true, Some(1000), "ph-couch"), + ("Commuter Mix", "Perfect-length episodes (15-45 minutes) for your daily commute", Some(900), Some(2700), "date_desc", true, true, true, None, false, Some(1000), "ph-car-simple"), + ]; - match action_type { - "play" => { - if let Some(position) = action.get("position").and_then(|v| v.as_i64()) { - self.save_episode_history(user_id, episode_id, position as i32, 0).await?; + match self { + DatabasePool::Postgres(pool) => { + for (name, description, min_duration, max_duration, sort_order, include_unplayed, 
include_partially_played, include_played, time_filter_hours, group_by_podcast, max_episodes, icon_name) in &default_playlists { + // Check if this playlist already exists for this user + let exists: bool = sqlx::query_scalar( + r#"SELECT EXISTS(SELECT 1 FROM "Playlists" WHERE userid = $1 AND name = $2)"# + ).bind(user_id).bind(name).fetch_one(pool).await?; + + if !exists { + // Create the playlist for this user + match sqlx::query(r#" + INSERT INTO "Playlists" ( + userid, name, description, issystemplaylist, minduration, maxduration, sortorder, + includeunplayed, includepartiallyplayed, includeplayed, timefilterhours, + groupbypodcast, maxepisodes, playprogressmin, playprogressmax, podcastids, + iconname, episodecount + ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18) + "#) + .bind(user_id).bind(name).bind(description).bind(false) + .bind(min_duration).bind(max_duration).bind(sort_order) + .bind(include_unplayed).bind(include_partially_played).bind(include_played) + .bind(time_filter_hours).bind(group_by_podcast).bind(max_episodes) + .bind(0.0).bind(100.0).bind(&[] as &[i32]).bind(icon_name).bind(0) + .execute(pool).await { + Ok(_) => info!("Created playlist '{}' for user {}", name, user_id), + Err(e) => warn!("Failed to create playlist '{}' for user {}: {}", name, user_id, e), + } + } } } - "download" => { - self.mark_episode_completed(episode_id, user_id, false).await?; - } - "delete" => { - // Remove episode from user's history - self.remove_episode_from_history(user_id, episode_id).await?; - } - _ => { - tracing::debug!("Unknown action type: {}", action_type); + DatabasePool::MySQL(pool) => { + for (name, description, min_duration, max_duration, sort_order, include_unplayed, include_partially_played, include_played, time_filter_hours, group_by_podcast, max_episodes, icon_name) in &default_playlists { + // Check if this playlist already exists for this user + let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM 
Playlists WHERE UserID = ? AND Name = ?") + .bind(user_id) + .bind(name) + .fetch_one(pool) + .await?; + + if count == 0 { + // Create the playlist for this user + match sqlx::query(" + INSERT INTO Playlists ( + UserID, Name, Description, IsSystemPlaylist, MinDuration, MaxDuration, SortOrder, + IncludeUnplayed, IncludePartiallyPlayed, IncludePlayed, TimeFilterHours, + GroupByPodcast, MaxEpisodes, PlayProgressMin, PlayProgressMax, PodcastIDs, + IconName, EpisodeCount + ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + ") + .bind(user_id).bind(name).bind(description).bind(false) + .bind(min_duration).bind(max_duration).bind(sort_order) + .bind(include_unplayed).bind(include_partially_played).bind(include_played) + .bind(time_filter_hours).bind(group_by_podcast).bind(max_episodes) + .bind(0.0).bind(100.0).bind("[]").bind(icon_name).bind(0) + .execute(pool).await { + Ok(_) => info!("Created playlist '{}' for user {}", name, user_id), + Err(e) => warn!("Failed to create playlist '{}' for user {}: {}", name, user_id, e), + } + } + } } } Ok(()) } - // Add podcast from URL - used by Nextcloud sync - pub async fn add_podcast_from_url(&self, user_id: i32, feed_url: &str, _feed_cutoff: Option) -> AppResult<()> { - // Check if podcast already exists for this user - if self.podcast_exists_for_user(user_id, feed_url).await? 
{ - tracing::info!("Podcast {} already exists for user {}", feed_url, user_id); - return Ok(()); - } - - // Get podcast metadata from feed URL using existing function - let podcast_values = self.get_podcast_values(feed_url, user_id, None, None).await?; + // Update episode counts for all playlists - replaces complex playlist content updates + pub async fn update_playlist_episode_counts(&self) -> AppResult<()> { + use tracing::{info, warn, debug}; - // Add podcast using existing function - let _result = self.add_podcast_from_values(&podcast_values, user_id, 30).await?; + info!("📊 Starting playlist episode count updates..."); - tracing::info!("Successfully added podcast {} for user {}", feed_url, user_id); - Ok(()) - } - - // Remove podcast by URL - used by Nextcloud sync - pub async fn remove_podcast_by_url(&self, user_id: i32, feed_url: &str) -> AppResult<()> { match self { DatabasePool::Postgres(pool) => { - let result = sqlx::query(r#"DELETE FROM "Podcasts" WHERE feedurl = $1 AND userid = $2"#) - .bind(feed_url) - .bind(user_id) - .execute(pool) - .await?; + // Get all playlists + let playlists = sqlx::query( + r#"SELECT playlistid, userid, name FROM "Playlists" ORDER BY userid, playlistid"# + ).fetch_all(pool).await?; + + info!("Found {} playlists to update counts for", playlists.len()); + + let mut updated_count = 0; + let mut failed_count = 0; + + for playlist in playlists { + let playlist_id: i32 = playlist.try_get("playlistid")?; + let user_id: i32 = playlist.try_get("userid")?; + let name: String = playlist.try_get("name")?; - if result.rows_affected() > 0 { - tracing::info!("Successfully removed podcast {} for user {}", feed_url, user_id); - } else { - tracing::info!("Podcast {} not found for user {}", feed_url, user_id); + match self.count_playlist_episodes_dynamic(playlist_id, user_id).await { + Ok(count) => { + // Update the episode_count in the playlist + match sqlx::query( + r#"UPDATE "Playlists" SET episodecount = $1 WHERE playlistid = $2"# + 
).bind(count).bind(playlist_id).execute(pool).await { + Ok(_) => { + debug!("Updated playlist '{}' (ID: {}) count to {}", name, playlist_id, count); + updated_count += 1; + } + Err(e) => { + warn!("Failed to update count for playlist '{}' (ID: {}): {}", name, playlist_id, e); + failed_count += 1; + } + } + } + Err(e) => { + warn!("Failed to count episodes for playlist '{}' (ID: {}): {}", name, playlist_id, e); + failed_count += 1; + } + } } + + info!("✅ Playlist count update completed: {} updated, {} failed", updated_count, failed_count); } DatabasePool::MySQL(pool) => { - let result = sqlx::query("DELETE FROM Podcasts WHERE FeedURL = ? AND UserID = ?") - .bind(feed_url) - .bind(user_id) - .execute(pool) - .await?; + // Get all playlists + let playlists = sqlx::query( + r#"SELECT PlaylistID, UserID, Name FROM Playlists ORDER BY UserID, PlaylistID"# + ).fetch_all(pool).await?; + + info!("Found {} playlists to update counts for (MySQL)", playlists.len()); + + let mut updated_count = 0; + let mut failed_count = 0; + + for playlist in playlists { + let playlist_id: i32 = playlist.try_get("PlaylistID")?; + let user_id: i32 = playlist.try_get("UserID")?; + let name: String = playlist.try_get("Name")?; - if result.rows_affected() > 0 { - tracing::info!("Successfully removed podcast {} for user {}", feed_url, user_id); - } else { - tracing::info!("Podcast {} not found for user {}", feed_url, user_id); + match self.count_playlist_episodes_dynamic(playlist_id, user_id).await { + Ok(count) => { + // Update the episode_count in the playlist + match sqlx::query( + r#"UPDATE Playlists SET EpisodeCount = ? 
WHERE PlaylistID = ?"# + ).bind(count).bind(playlist_id).execute(pool).await { + Ok(_) => { + debug!("Updated MySQL playlist '{}' (ID: {}) count to {}", name, playlist_id, count); + updated_count += 1; + } + Err(e) => { + warn!("Failed to update MySQL count for playlist '{}' (ID: {}): {}", name, playlist_id, e); + failed_count += 1; + } + } + } + Err(e) => { + warn!("Failed to count MySQL episodes for playlist '{}' (ID: {}): {}", name, playlist_id, e); + failed_count += 1; + } + } } + + info!("✅ MySQL playlist count update completed: {} updated, {} failed", updated_count, failed_count); } } + Ok(()) } - // Get episode ID by URL - used by Nextcloud episode actions - pub async fn get_episode_id_by_url(&self, episode_url: &str) -> AppResult> { + // Count episodes for a playlist using the same dynamic logic (without pagination) + async fn count_playlist_episodes_dynamic(&self, playlist_id: i32, user_id: i32) -> AppResult { + use tracing::{debug, warn}; + match self { DatabasePool::Postgres(pool) => { - let row = sqlx::query(r#"SELECT episodeid FROM "Episodes" WHERE episodeurl = $1 LIMIT 1"#) - .bind(episode_url) - .fetch_optional(pool) + // Get user timezone for proper date calculations + let raw_user_timezone: String = sqlx::query_scalar( + r#"SELECT timezone FROM "Users" WHERE userid = $1"# + ).bind(user_id).fetch_optional(pool).await?.unwrap_or_else(|| "UTC".to_string()); + let raw_timezone = if raw_user_timezone.is_empty() { "UTC".to_string() } else { raw_user_timezone }; + let user_timezone = Self::map_timezone_for_postgres(&raw_timezone); + + // Get playlist configuration + let playlist_row = sqlx::query( + r#"SELECT userid, name, minduration, maxduration, sortorder, + includeunplayed, includepartiallyplayed, includeplayed, timefilterhours, + groupbypodcast, maxepisodes, playprogressmin, playprogressmax, podcastids + FROM "Playlists" WHERE playlistid = $1"# + ).bind(playlist_id).fetch_optional(pool).await?; + + let playlist = playlist_row.ok_or_else(|| 
crate::error::AppError::not_found("Playlist not found"))?; + + // Build count query using same logic as dynamic function + let mut query_parts = Vec::new(); + let mut where_conditions = Vec::new(); + + query_parts.push(format!(r#" + SELECT COUNT(DISTINCT e.episodeid) + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid AND p.userid = {} + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = {}"#, + user_id, user_id + )); + + where_conditions.push("p.userid = $1".to_string()); + + // Apply all the same filters as dynamic function + if let Some(min_dur) = playlist.try_get::, _>("minduration")? { + where_conditions.push(format!("e.episodeduration >= {}", min_dur)); + } + if let Some(max_dur) = playlist.try_get::, _>("maxduration")? { + where_conditions.push(format!("e.episodeduration <= {}", max_dur)); + } + + if let Some(hours) = playlist.try_get::, _>("timefilterhours")? { + where_conditions.push(format!( + "e.episodepubdate >= (NOW() AT TIME ZONE '{}' - INTERVAL '{} hours') AT TIME ZONE '{}' AT TIME ZONE 'UTC'", + user_timezone, hours, user_timezone + )); + } + + // Handle PostgreSQL array type for podcast IDs + if let Some(podcast_ids) = playlist.try_get::>, _>("podcastids")? { + if !podcast_ids.is_empty() && !podcast_ids.contains(&-1) { + let podcast_ids_str = podcast_ids.iter().map(|id| id.to_string()).collect::>().join(","); + where_conditions.push(format!("p.podcastid IN ({})", podcast_ids_str)); + } + } + + // Play state filters + let mut play_state_conditions = Vec::new(); + if playlist.try_get::("includeunplayed")? { + play_state_conditions.push("(h.listenduration IS NULL OR h.listenduration = 0)".to_string()); + } + if playlist.try_get::("includepartiallyplayed")? { + play_state_conditions.push( + "(h.listenduration > 0 AND h.listenduration < e.episodeduration * 0.9 AND (e.episodeduration - h.listenduration) > 30)".to_string() + ); + } + if playlist.try_get::("includeplayed")? 
{ + play_state_conditions.push( + "(h.listenduration IS NOT NULL AND (h.listenduration >= e.episodeduration * 0.9 OR (e.episodeduration - h.listenduration) <= 30))".to_string() + ); + } + + if !play_state_conditions.is_empty() { + where_conditions.push(format!("({})", play_state_conditions.join(" OR "))); + } else { + where_conditions.push("FALSE".to_string()); + } + + // Progress filters + if let Some(min_progress) = playlist.try_get::, _>("playprogressmin")? { + where_conditions.push(format!( + "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) >= {}", + min_progress / 100.0 + )); + } + if let Some(max_progress) = playlist.try_get::, _>("playprogressmax")? { + where_conditions.push(format!( + "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) <= {}", + max_progress / 100.0 + )); + } + + let where_clause = if where_conditions.is_empty() { + String::new() + } else { + format!(" WHERE {}", where_conditions.join(" AND ")) + }; + + let final_query = format!("{}{}", query_parts.join(" "), where_clause); + + let count: i64 = sqlx::query_scalar(&final_query) + .bind(user_id) + .fetch_one(pool) .await?; - - if let Some(row) = row { - Ok(Some(row.try_get("episodeid")?)) + + // Apply MaxEpisodes limit if specified + let final_count = if let Some(max_eps) = playlist.try_get::, _>("maxepisodes")? 
{ + if max_eps > 0 { + std::cmp::min(count as i32, max_eps) + } else { + count as i32 + } } else { - Ok(None) + count as i32 + }; + + Ok(final_count) + } + DatabasePool::MySQL(pool) => { + // Similar implementation for MySQL with adjusted syntax + let raw_user_timezone: String = sqlx::query_scalar("SELECT TimeZone FROM Users WHERE UserID = ?") + .bind(user_id).fetch_optional(pool).await?.unwrap_or_else(|| "UTC".to_string()); + let raw_timezone = if raw_user_timezone.is_empty() { "UTC".to_string() } else { raw_user_timezone }; + let user_timezone = Self::map_timezone_for_postgres(&raw_timezone); + + let playlist_row = sqlx::query( + r#"SELECT UserID, Name, MinDuration, MaxDuration, SortOrder, + IncludeUnplayed, IncludePartiallyPlayed, IncludePlayed, TimeFilterHours, + GroupByPodcast, MaxEpisodes, PlayProgressMin, PlayProgressMax, PodcastIDs + FROM Playlists WHERE PlaylistID = ?"# + ).bind(playlist_id).fetch_optional(pool).await?; + + let playlist = playlist_row.ok_or_else(|| crate::error::AppError::not_found("Playlist not found"))?; + + // MySQL count query with similar logic + let mut query_parts = Vec::new(); + let mut where_conditions = Vec::new(); + + query_parts.push(format!(r#" + SELECT COUNT(DISTINCT e.EpisodeID) + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID AND p.UserID = {} + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = {}"#, + user_id, user_id + )); + + where_conditions.push("p.UserID = ?".to_string()); + + // Apply all MySQL filters + if let Some(min_dur) = playlist.try_get::, _>("MinDuration")? { + where_conditions.push(format!("e.EpisodeDuration >= {}", min_dur)); + } + if let Some(max_dur) = playlist.try_get::, _>("MaxDuration")? { + where_conditions.push(format!("e.EpisodeDuration <= {}", max_dur)); + } + + if let Some(hours) = playlist.try_get::, _>("TimeFilterHours")? 
{ + where_conditions.push(format!( + "e.EpisodePubDate >= DATE_SUB(CONVERT_TZ(NOW(), 'UTC', '{}'), INTERVAL {} HOUR)", + user_timezone, hours + )); } - } - DatabasePool::MySQL(pool) => { - let row = sqlx::query("SELECT EpisodeID FROM Episodes WHERE EpisodeURL = ? LIMIT 1") - .bind(episode_url) - .fetch_optional(pool) + + // Continue with all other MySQL filters... + let where_clause = if where_conditions.is_empty() { + String::new() + } else { + format!(" WHERE {}", where_conditions.join(" AND ")) + }; + + let final_query = format!("{}{}", query_parts.join(" "), where_clause); + + let count: i64 = sqlx::query_scalar(&final_query) + .bind(user_id) + .fetch_one(pool) .await?; - - if let Some(row) = row { - Ok(Some(row.try_get("EpisodeID")?)) + + let final_count = if let Some(max_eps) = playlist.try_get::, _>("MaxEpisodes")? { + if max_eps > 0 { + std::cmp::min(count as i32, max_eps) + } else { + count as i32 + } } else { - Ok(None) + count as i32 + }; + + Ok(final_count) + } + } + } + + /// Convert chrono-tz timezone names to PostgreSQL-compatible timezone names + /// This handles the mapping between frontend timezone selections and database queries + fn map_timezone_for_postgres(user_timezone: &str) -> String { + match user_timezone { + // US timezone mappings + "US/Alaska" => "America/Anchorage".to_string(), + "US/Aleutian" => "America/Adak".to_string(), + "US/Arizona" => "America/Phoenix".to_string(), + "US/Central" => "America/Chicago".to_string(), + "US/East-Indiana" => "America/Indiana/Indianapolis".to_string(), + "US/Eastern" => "America/New_York".to_string(), + "US/Hawaii" => "Pacific/Honolulu".to_string(), + "US/Indiana-Starke" => "America/Indiana/Knox".to_string(), + "US/Michigan" => "America/Detroit".to_string(), + "US/Mountain" => "America/Denver".to_string(), + "US/Pacific" => "America/Los_Angeles".to_string(), + "US/Samoa" => "Pacific/Pago_Pago".to_string(), + + // Canada timezone mappings + "Canada/Atlantic" => "America/Halifax".to_string(), + 
"Canada/Central" => "America/Winnipeg".to_string(), + "Canada/Eastern" => "America/Toronto".to_string(), + "Canada/Mountain" => "America/Edmonton".to_string(), + "Canada/Newfoundland" => "America/St_Johns".to_string(), + "Canada/Pacific" => "America/Vancouver".to_string(), + "Canada/Saskatchewan" => "America/Regina".to_string(), + "Canada/Yukon" => "America/Whitehorse".to_string(), + + // Brazil timezone mappings + "Brazil/Acre" => "America/Rio_Branco".to_string(), + "Brazil/DeNoronha" => "America/Noronha".to_string(), + "Brazil/East" => "America/Sao_Paulo".to_string(), + "Brazil/West" => "America/Manaus".to_string(), + + // Chile timezone mappings + "Chile/Continental" => "America/Santiago".to_string(), + "Chile/EasterIsland" => "Pacific/Easter".to_string(), + + // Mexico timezone mappings + "Mexico/BajaNorte" => "America/Tijuana".to_string(), + "Mexico/BajaSur" => "America/Mazatlan".to_string(), + "Mexico/General" => "America/Mexico_City".to_string(), + + // Common US legacy timezone abbreviations + "EST" => "America/New_York".to_string(), + "CST" => "America/Chicago".to_string(), + "MST" => "America/Denver".to_string(), + "PST" => "America/Los_Angeles".to_string(), + "HST" => "Pacific/Honolulu".to_string(), + "EST5EDT" => "America/New_York".to_string(), + "CST6CDT" => "America/Chicago".to_string(), + "MST7MDT" => "America/Denver".to_string(), + "PST8PDT" => "America/Los_Angeles".to_string(), + + // European legacy mappings + "CET" => "Europe/Paris".to_string(), + "EET" => "Europe/Helsinki".to_string(), + "WET" => "Europe/Lisbon".to_string(), + "MET" => "Europe/Paris".to_string(), + + // Common international legacy mappings + "GMT" => "UTC".to_string(), + "GMT+0" => "UTC".to_string(), + "GMT-0" => "UTC".to_string(), + "GMT0" => "UTC".to_string(), + "Greenwich" => "UTC".to_string(), + "UCT" => "UTC".to_string(), + "Universal" => "UTC".to_string(), + "Zulu" => "UTC".to_string(), + + // Country/region legacy mappings + "Cuba" => "America/Havana".to_string(), + 
"Egypt" => "Africa/Cairo".to_string(), + "Eire" => "Europe/Dublin".to_string(), + "GB" => "Europe/London".to_string(), + "GB-Eire" => "Europe/London".to_string(), + "Hongkong" => "Asia/Hong_Kong".to_string(), + "Iceland" => "Atlantic/Reykjavik".to_string(), + "Iran" => "Asia/Tehran".to_string(), + "Israel" => "Asia/Jerusalem".to_string(), + "Jamaica" => "America/Jamaica".to_string(), + "Japan" => "Asia/Tokyo".to_string(), + "Kwajalein" => "Pacific/Kwajalein".to_string(), + "Libya" => "Africa/Tripoli".to_string(), + "NZ" => "Pacific/Auckland".to_string(), + "NZ-CHAT" => "Pacific/Chatham".to_string(), + "Navajo" => "America/Denver".to_string(), + "PRC" => "Asia/Shanghai".to_string(), + "Poland" => "Europe/Warsaw".to_string(), + "Portugal" => "Europe/Lisbon".to_string(), + "ROC" => "Asia/Taipei".to_string(), + "ROK" => "Asia/Seoul".to_string(), + "Singapore" => "Asia/Singapore".to_string(), + "Turkey" => "Europe/Istanbul".to_string(), + "W-SU" => "Europe/Moscow".to_string(), + + // If it's already a valid IANA timezone name or unknown, pass through + _ => { + // For unknown timezones, fall back to UTC to prevent errors + if user_timezone.is_empty() { + "UTC".to_string() + } else { + // Most chrono-tz names are already IANA compliant, so try the original first + user_timezone.to_string() } } } } - // Save episode history - used by Nextcloud episode actions - pub async fn save_episode_history(&self, user_id: i32, episode_id: i32, position: i32, _total_time: i32) -> AppResult<()> { - match self { - DatabasePool::Postgres(pool) => { - sqlx::query(r#" - INSERT INTO "UserEpisodeHistory" (userid, episodeid, listenduration, episodecompleted, episodeprogress) - VALUES ($1, $2, $3, FALSE, $4) - ON CONFLICT (userid, episodeid) - DO UPDATE SET listenduration = $3, episodeprogress = $4 - "#) - .bind(user_id) - .bind(episode_id) - .bind(position) - .bind(position) - .execute(pool) - .await?; - } - DatabasePool::MySQL(pool) => { - sqlx::query(r#" - INSERT INTO UserEpisodeHistory 
(UserID, EpisodeID, ListenDuration, EpisodeCompleted, EpisodeProgress) - VALUES (?, ?, ?, FALSE, ?) - ON DUPLICATE KEY UPDATE ListenDuration = ?, EpisodeProgress = ? - "#) - .bind(user_id) - .bind(episode_id) - .bind(position) - .bind(position) - .bind(position) - .bind(position) - .execute(pool) - .await?; - } - } - Ok(()) - } - - // Remove episode from history - used by Nextcloud episode actions - pub async fn remove_episode_from_history(&self, user_id: i32, episode_id: i32) -> AppResult<()> { + // Get playlist episodes dynamically without using PlaylistContents table + // ULTRA-PRECISE implementation covering ALL playlist options with timezone awareness + pub async fn get_playlist_episodes_dynamic( + &self, + playlist_id: i32, + user_id: i32 + ) -> AppResult { + use tracing::{info, debug, warn}; + + debug!("🎵 Getting dynamic playlist episodes for playlist {} user {}", playlist_id, user_id); + match self { DatabasePool::Postgres(pool) => { - sqlx::query(r#"DELETE FROM "UserEpisodeHistory" WHERE userid = $1 AND episodeid = $2"#) + // Get user timezone for proper date calculations + let raw_user_timezone: String = sqlx::query_scalar( + r#"SELECT timezone FROM "Users" WHERE userid = $1"# + ).bind(user_id).fetch_optional(pool).await?.unwrap_or_else(|| "UTC".to_string()); + let raw_timezone = if raw_user_timezone.is_empty() { "UTC".to_string() } else { raw_user_timezone }; + let user_timezone = Self::map_timezone_for_postgres(&raw_timezone); + + debug!("User {} timezone: {} -> {}", user_id, raw_timezone, user_timezone); + + // Get playlist configuration with ALL fields + let playlist_row = sqlx::query( + r#"SELECT userid, name, description, minduration, maxduration, sortorder, + includeunplayed, includepartiallyplayed, includeplayed, timefilterhours, + groupbypodcast, maxepisodes, playprogressmin, playprogressmax, podcastids, + issystemplaylist, created, iconname, episodecount + FROM "Playlists" WHERE playlistid = $1"# + 
).bind(playlist_id).fetch_optional(pool).await?; + + let playlist = playlist_row.ok_or_else(|| crate::error::AppError::not_found("Playlist not found"))?; + + // Check user permissions - users can only access their own playlists + if playlist.try_get::("userid")? != user_id { + return Err(crate::error::AppError::forbidden("You can only access your own playlists")); + } + + debug!("📋 Playlist '{}' config: min_dur={:?}, max_dur={:?}, sort={}, time_filter={:?}, progress_min={:?}, progress_max={:?}", + playlist.try_get::("name")?, playlist.try_get::, _>("minduration")?, playlist.try_get::, _>("maxduration")?, playlist.try_get::("sortorder")?, + playlist.try_get::, _>("timefilterhours")?, playlist.try_get::, _>("playprogressmin")?, playlist.try_get::, _>("playprogressmax")?); + + // Build the comprehensive dynamic query + let mut query_parts = Vec::new(); + let mut where_conditions = Vec::new(); + let mut bind_values: Vec> = Vec::new(); + + // Base SELECT with all episode data needed for SavedEpisode model including podcastid + query_parts.push(format!(r#" + SELECT DISTINCT + e.episodetitle, + p.podcastname, + TO_CHAR(e.episodepubdate AT TIME ZONE '{}', 'YYYY-MM-DD"T"HH24:MI:SS"Z"') as episodepubdate, + e.episodedescription, + COALESCE(e.episodeartwork, p.artworkurl) as episodeartwork, + e.episodeurl, + e.episodeduration, + COALESCE(h.listenduration, 0) as listenduration, + e.episodeid, + COALESCE(p.websiteurl, '') as websiteurl, + -- ULTRA-PRECISE completion logic: 90% threshold OR within 30 seconds of end + CASE WHEN + h.listenduration IS NOT NULL AND ( + h.listenduration >= e.episodeduration * 0.9 OR + (e.episodeduration - h.listenduration) <= 30 + ) + THEN true ELSE false END as completed, + EXISTS(SELECT 1 FROM "SavedEpisodes" se WHERE se.episodeid = e.episodeid AND se.userid = {}) as saved, + EXISTS(SELECT 1 FROM "EpisodeQueue" eq WHERE eq.episodeid = e.episodeid AND eq.userid = {}) as queued, + EXISTS(SELECT 1 FROM "DownloadedEpisodes" de WHERE de.episodeid = 
e.episodeid AND de.userid = {}) as downloaded, + false as is_youtube, + p.podcastid, + -- Progress percentage for debugging + ROUND(((COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) * 100)::numeric, 2) as progress_percent + FROM "Episodes" e + JOIN "Podcasts" p ON e.podcastid = p.podcastid AND p.userid = {} + LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = {}"#, + user_timezone, user_id, user_id, user_id, user_id, user_id + )); + + // Base condition - always filter by user's podcasts + where_conditions.push("p.userid = $1".to_string()); + + // 1. DURATION FILTERS - exact duration matching + if let Some(min_dur) = playlist.try_get::, _>("minduration")? { + where_conditions.push(format!("e.episodeduration >= {}", min_dur)); + debug!("🕒 Added min duration filter: {} seconds", min_dur); + } + if let Some(max_dur) = playlist.try_get::, _>("maxduration")? { + where_conditions.push(format!("e.episodeduration <= {}", max_dur)); + debug!("🕒 Added max duration filter: {} seconds", max_dur); + } + + // 2. TIMEZONE-AWARE TIME FILTER - ULTRA-PRECISE datetime handling + if let Some(hours) = playlist.try_get::, _>("timefilterhours")? { + // Convert hours to user's timezone for precise "last X hours" calculation + where_conditions.push(format!( + "e.episodepubdate >= (NOW() AT TIME ZONE '{}' - INTERVAL '{} hours') AT TIME ZONE '{}' AT TIME ZONE 'UTC'", + user_timezone, hours, user_timezone + )); + debug!("📅 Added timezone-aware time filter: last {} hours in timezone {}", hours, user_timezone); + } + + // 3. PODCAST FILTER - handle PostgreSQL array of podcast IDs + if let Some(podcast_ids) = playlist.try_get::>, _>("podcastids")? 
{ + if !podcast_ids.is_empty() && !podcast_ids.contains(&-1) { + let podcast_ids_str = podcast_ids.iter().map(|id| id.to_string()).collect::>().join(","); + where_conditions.push(format!("p.podcastid IN ({})", podcast_ids_str)); + debug!("🎙️ Added PostgreSQL podcast filter: {:?}", podcast_ids); + } else { + debug!("🎙️ PostgreSQL podcast filter contains -1 or is empty, including all podcasts"); + } + } + + // 4. ULTRA-PRECISE PLAY STATE FILTERS + let mut play_state_conditions = Vec::new(); + + if playlist.try_get::("includeunplayed")? { + // UNPLAYED: No history record OR listen duration is 0 or NULL + play_state_conditions.push("(h.listenduration IS NULL OR h.listenduration = 0)".to_string()); + debug!("▶️ Including UNPLAYED episodes"); + } + + if playlist.try_get::("includepartiallyplayed")? { + // PARTIALLY PLAYED: Has listen time > 0 but < 90% AND not within 30 seconds of end + play_state_conditions.push( + "(h.listenduration > 0 AND h.listenduration < e.episodeduration * 0.9 AND (e.episodeduration - h.listenduration) > 30)".to_string() + ); + debug!("⏸️ Including PARTIALLY PLAYED episodes (>0% and <90%, not within 30s of end)"); + } + + if playlist.try_get::("includeplayed")? { + // PLAYED: Listen duration >= 90% OR within 30 seconds of end + play_state_conditions.push( + "(h.listenduration IS NOT NULL AND (h.listenduration >= e.episodeduration * 0.9 OR (e.episodeduration - h.listenduration) <= 30))".to_string() + ); + debug!("✅ Including PLAYED episodes (>=90% or within 30s of end)"); + } + + if !play_state_conditions.is_empty() { + where_conditions.push(format!("({})", play_state_conditions.join(" OR "))); + } else { + // No play states selected - return no results + where_conditions.push("FALSE".to_string()); + warn!("⚠️ No play states selected for playlist '{}' - will return empty results", playlist.try_get::("name")?); + } + + // 5. ULTRA-PRECISE PROGRESS PERCENTAGE FILTERS + if let Some(min_progress) = playlist.try_get::, _>("playprogressmin")? 
{ + let min_decimal = min_progress / 100.0; + where_conditions.push(format!( + "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) >= {}", + min_decimal + )); + debug!("📊 Added min progress filter: {}% ({})", min_progress, min_decimal); + } + if let Some(max_progress) = playlist.try_get::, _>("playprogressmax")? { + let max_decimal = max_progress / 100.0; + where_conditions.push(format!( + "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) <= {}", + max_decimal + )); + debug!("📊 Added max progress filter: {}% ({})", max_progress, max_decimal); + } + + // 6. ULTRA-PRECISE ORDERING with podcast grouping support + let mut order_parts = Vec::new(); + + if playlist.try_get::("groupbypodcast")? { + order_parts.push("p.podcastid".to_string()); + debug!("📚 Grouping by podcast enabled"); + } + + let sort_clause = match playlist.try_get::("sortorder")?.as_str() { + "date_asc" => "episodepubdate ASC", + "date_desc" => "episodepubdate DESC", + "duration_asc" => "episodeduration ASC", + "duration_desc" => "episodeduration DESC", + "listen_progress" => "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC", + "completion" => "(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC", + "random" => "RANDOM()", + "title_asc" => "episodetitle ASC", + "title_desc" => "episodetitle DESC", + "podcast_asc" => "podcastname ASC", + "podcast_desc" => "podcastname DESC", + _ => { + warn!("⚠️ Unknown sort order '{}', defaulting to date_desc", playlist.try_get::("sortorder")?); + "episodepubdate DESC" + } + }; + order_parts.push(sort_clause.to_string()); + debug!("🔄 Sort order: {}", sort_clause); + + // 7. 
BUILD FINAL QUERY + let where_clause = if where_conditions.is_empty() { + String::new() + } else { + format!(" WHERE {}", where_conditions.join(" AND ")) + }; + + let order_clause = format!(" ORDER BY {}", order_parts.join(", ")); + + // Apply MaxEpisodes limit if specified (playlist setting, not pagination) + let limit_clause = if let Some(max_eps) = playlist.try_get::, _>("maxepisodes")? { + if max_eps > 0 { + format!(" LIMIT {}", max_eps) + } else { + String::new() + } + } else { + String::new() + }; + + let final_query = format!("{}{}{}{}", + query_parts.join(" "), where_clause, order_clause, limit_clause); + + debug!("🔍 Final dynamic playlist query: {}", final_query); + + // Execute the main query + let rows = sqlx::query(&final_query) .bind(user_id) - .bind(episode_id) - .execute(pool) + .fetch_all(pool) .await?; + + let mut episodes = Vec::new(); + for row in rows { + episodes.push(crate::models::SavedEpisode { + episodetitle: row.try_get("episodetitle")?, + podcastname: row.try_get("podcastname")?, + episodepubdate: row.try_get("episodepubdate")?, + episodedescription: row.try_get("episodedescription")?, + episodeartwork: row.try_get("episodeartwork")?, + episodeurl: row.try_get("episodeurl")?, + episodeduration: row.try_get("episodeduration")?, + listenduration: row.try_get("listenduration").ok(), + episodeid: row.try_get("episodeid")?, + websiteurl: row.try_get("websiteurl")?, + completed: row.try_get("completed")?, + saved: row.try_get("saved")?, + queued: row.try_get("queued")?, + downloaded: row.try_get("downloaded")?, + is_youtube: row.try_get("is_youtube")?, + podcastid: row.try_get("podcastid").ok(), + }); + } + + debug!("📝 Retrieved {} episodes from dynamic query", episodes.len()); + + // Create playlist info from the playlist row we already have + let playlist_info = crate::models::PlaylistInfo { + name: playlist.try_get::("name")?, + description: playlist.try_get::("description")?, + episode_count: episodes.len() as i32, + icon_name: 
playlist.try_get::("iconname")?, + }; + + Ok(crate::models::PlaylistEpisodesResponse { + episodes, + playlist_info, + }) } DatabasePool::MySQL(pool) => { - sqlx::query("DELETE FROM UserEpisodeHistory WHERE UserID = ? AND EpisodeID = ?") + // FULL MySQL implementation with exact same logic but MySQL syntax + let raw_user_timezone: String = sqlx::query_scalar("SELECT TimeZone FROM Users WHERE UserID = ?").bind(user_id) + .fetch_optional(pool).await?.unwrap_or_else(|| "UTC".to_string()); + let raw_timezone = if raw_user_timezone.is_empty() { "UTC".to_string() } else { raw_user_timezone }; + let user_timezone = Self::map_timezone_for_postgres(&raw_timezone); + + debug!("User {} timezone: {} -> {}", user_id, raw_timezone, user_timezone); + + let playlist_row = sqlx::query( + r#"SELECT UserID, Name, Description, MinDuration, MaxDuration, SortOrder, + IncludeUnplayed, IncludePartiallyPlayed, IncludePlayed, TimeFilterHours, + GroupByPodcast, MaxEpisodes, PlayProgressMin, PlayProgressMax, PodcastIDs, + IsSystemPlaylist, Created, IconName, EpisodeCount + FROM Playlists WHERE PlaylistID = ?"# + ).bind(playlist_id).fetch_optional(pool).await?; + + let playlist = playlist_row.ok_or_else(|| crate::error::AppError::not_found("Playlist not found"))?; + + if playlist.try_get::("UserID")? 
!= user_id { + return Err(crate::error::AppError::forbidden("You can only access your own playlists")); + } + + debug!("📋 MySQL Playlist '{}' config loaded", playlist.try_get::("Name")?); + + // MySQL version with adjusted syntax for timezone conversion and date handling + let mut query_parts = Vec::new(); + let mut where_conditions = Vec::new(); + + query_parts.push(format!(r#" + SELECT DISTINCT + e.EpisodeTitle as episodetitle, + p.PodcastName as podcastname, + DATE_FORMAT(CONVERT_TZ(e.EpisodePubDate, 'UTC', '{}'), '%Y-%m-%dT%H:%i:%sZ') as episodepubdate, + e.EpisodeDescription as episodedescription, + COALESCE(e.EpisodeArtwork, p.ArtworkURL) as episodeartwork, + e.EpisodeURL as episodeurl, + e.EpisodeDuration as episodeduration, + COALESCE(h.ListenDuration, 0) as listenduration, + e.EpisodeID as episodeid, + COALESCE(p.WebsiteURL, '') as websiteurl, + CASE WHEN + h.ListenDuration IS NOT NULL AND ( + h.ListenDuration >= e.EpisodeDuration * 0.9 OR + (e.EpisodeDuration - h.ListenDuration) <= 30 + ) + THEN 1 ELSE 0 END as completed, + EXISTS(SELECT 1 FROM SavedEpisodes se WHERE se.EpisodeID = e.EpisodeID AND se.UserID = {}) as saved, + EXISTS(SELECT 1 FROM EpisodeQueue eq WHERE eq.EpisodeID = e.EpisodeID AND eq.UserID = {}) as queued, + EXISTS(SELECT 1 FROM DownloadedEpisodes de WHERE de.EpisodeID = e.EpisodeID AND de.UserID = {}) as downloaded, + 0 as is_youtube, + p.PodcastID as podcastid, + ROUND((COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) * 100, 2) as progress_percent + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID AND p.UserID = {} + LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = {}"#, + user_timezone, user_id, user_id, user_id, user_id, user_id + )); + + where_conditions.push("p.UserID = ?".to_string()); + + // Apply all the same filters with MySQL syntax + if let Some(min_dur) = playlist.try_get::, _>("MinDuration")? 
{ + where_conditions.push(format!("e.EpisodeDuration >= {}", min_dur)); + } + if let Some(max_dur) = playlist.try_get::, _>("MaxDuration")? { + where_conditions.push(format!("e.EpisodeDuration <= {}", max_dur)); + } + + if let Some(hours) = playlist.try_get::, _>("TimeFilterHours")? { + where_conditions.push(format!( + "e.EpisodePubDate >= DATE_SUB(CONVERT_TZ(NOW(), 'UTC', '{}'), INTERVAL {} HOUR)", + user_timezone, hours + )); + debug!("📅 Added MySQL timezone-aware time filter: last {} hours in timezone {}", hours, user_timezone); + } + + // 3. PODCAST FILTER - handle JSON array of podcast IDs (MySQL) + if let Some(podcast_ids_json) = playlist.try_get::, _>("PodcastIDs")?.as_ref() { + if !podcast_ids_json.is_empty() && podcast_ids_json != "[]" && podcast_ids_json != "null" { + match serde_json::from_str::>(podcast_ids_json) { + Ok(podcast_ids) if !podcast_ids.is_empty() && !podcast_ids.contains(&-1) => { + let podcast_ids_str = podcast_ids.iter().map(|id| id.to_string()).collect::>().join(","); + where_conditions.push(format!("p.PodcastID IN ({})", podcast_ids_str)); + debug!("🎙️ Added MySQL podcast filter: {:?}", podcast_ids); + } + Ok(_) => debug!("🎙️ MySQL podcast filter contains -1 or is empty, including all podcasts"), + Err(e) => warn!("⚠️ Failed to parse MySQL podcast IDs JSON '{}': {}", podcast_ids_json, e), + } + } + } + + // 4. ULTRA-PRECISE PLAY STATE FILTERS (MySQL) + let mut play_state_conditions = Vec::new(); + + if playlist.try_get::("IncludeUnplayed")? { + play_state_conditions.push("(h.ListenDuration IS NULL OR h.ListenDuration = 0)".to_string()); + debug!("▶️ Including UNPLAYED episodes (MySQL)"); + } + + if playlist.try_get::("IncludePartiallyPlayed")? { + play_state_conditions.push( + "(h.ListenDuration > 0 AND h.ListenDuration < e.EpisodeDuration * 0.9 AND (e.EpisodeDuration - h.ListenDuration) > 30)".to_string() + ); + debug!("⏸️ Including PARTIALLY PLAYED episodes (MySQL)"); + } + + if playlist.try_get::("IncludePlayed")? 
{ + play_state_conditions.push( + "(h.ListenDuration IS NOT NULL AND (h.ListenDuration >= e.EpisodeDuration * 0.9 OR (e.EpisodeDuration - h.ListenDuration) <= 30))".to_string() + ); + debug!("✅ Including PLAYED episodes (MySQL)"); + } + + if !play_state_conditions.is_empty() { + where_conditions.push(format!("({})", play_state_conditions.join(" OR "))); + } else { + where_conditions.push("FALSE".to_string()); + warn!("⚠️ No play states selected for MySQL playlist '{}' - will return empty results", playlist.try_get::("Name")?); + } + + // 5. ULTRA-PRECISE PROGRESS PERCENTAGE FILTERS (MySQL) + if let Some(min_progress) = playlist.try_get::, _>("PlayProgressMin")? { + let min_decimal = min_progress / 100.0; + where_conditions.push(format!( + "(COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) >= {}", + min_decimal + )); + debug!("📊 Added MySQL min progress filter: {}% ({})", min_progress, min_decimal); + } + if let Some(max_progress) = playlist.try_get::, _>("PlayProgressMax")? { + let max_decimal = max_progress / 100.0; + where_conditions.push(format!( + "(COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) <= {}", + max_decimal + )); + debug!("📊 Added MySQL max progress filter: {}% ({})", max_progress, max_decimal); + } + + // 6. ULTRA-PRECISE ORDERING with podcast grouping support (MySQL) + let mut order_parts = Vec::new(); + + if playlist.try_get::("GroupByPodcast")? 
{ + order_parts.push("podcastid".to_string()); + debug!("📚 MySQL grouping by podcast enabled"); + } + + let sort_clause = match playlist.try_get::("SortOrder")?.as_str() { + "date_asc" => "episodepubdate ASC", + "date_desc" => "episodepubdate DESC", + "duration_asc" => "episodeduration ASC", + "duration_desc" => "episodeduration DESC", + "listen_progress" => "(COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) DESC", + "completion" => "(COALESCE(h.ListenDuration, 0) / NULLIF(e.EpisodeDuration, 0)) DESC", + "random" => "RAND()", + "title_asc" => "episodetitle ASC", + "title_desc" => "episodetitle DESC", + "podcast_asc" => "podcastname ASC", + "podcast_desc" => "podcastname DESC", + _ => { + warn!("⚠️ Unknown MySQL sort order '{}', defaulting to date_desc", playlist.try_get::("SortOrder")?); + "episodepubdate DESC" + } + }; + order_parts.push(sort_clause.to_string()); + debug!("🔄 MySQL Sort order: {}", sort_clause); + + // 7. BUILD FINAL MYSQL QUERY + let where_clause = if where_conditions.is_empty() { + String::new() + } else { + format!(" WHERE {}", where_conditions.join(" AND ")) + }; + + let order_clause = format!(" ORDER BY {}", order_parts.join(", ")); + + // Apply MaxEpisodes limit if specified (MySQL) + let limit_clause = if let Some(max_eps) = playlist.try_get::, _>("MaxEpisodes")? 
{ + if max_eps > 0 { + format!(" LIMIT {}", max_eps) + } else { + String::new() + } + } else { + String::new() + }; + + let final_query = format!("{}{}{}{}", + query_parts.join(" "), where_clause, order_clause, limit_clause); + + debug!("🔍 Final MySQL dynamic playlist query: {}", final_query); + + // Execute the main MySQL query + let rows = sqlx::query(&final_query) .bind(user_id) - .bind(episode_id) - .execute(pool) + .fetch_all(pool) .await?; + + let mut episodes = Vec::new(); + for row in rows { + episodes.push(crate::models::SavedEpisode { + episodetitle: row.try_get("episodetitle")?, + podcastname: row.try_get("podcastname")?, + episodepubdate: row.try_get("episodepubdate")?, + episodedescription: row.try_get("episodedescription")?, + episodeartwork: row.try_get("episodeartwork")?, + episodeurl: row.try_get("episodeurl")?, + episodeduration: row.try_get("episodeduration")?, + listenduration: row.try_get("listenduration").ok(), + episodeid: row.try_get("episodeid")?, + websiteurl: row.try_get("websiteurl")?, + completed: row.try_get("completed")?, + saved: row.try_get("saved")?, + queued: row.try_get("queued")?, + downloaded: row.try_get("downloaded")?, + is_youtube: row.try_get("is_youtube")?, + podcastid: row.try_get("podcastid").ok(), + }); + } + + debug!("📝 Retrieved {} episodes from MySQL dynamic query", episodes.len()); + + // Create playlist info from the MySQL playlist row we already have + let playlist_info = crate::models::PlaylistInfo { + name: playlist.try_get::("Name")?, + description: playlist.try_get::("Description")?, + episode_count: episodes.len() as i32, + icon_name: playlist.try_get::("IconName")?, + }; + + Ok(crate::models::PlaylistEpisodesResponse { + episodes, + playlist_info, + }) } } - Ok(()) } +} + +// Standalone create_playlist function that matches Python API +pub async fn create_playlist(pool: &DatabasePool, config: &Config, playlist_data: &crate::models::CreatePlaylistRequest) -> AppResult { + pool.create_playlist(config, 
playlist_data).await +} + +// Standalone delete_playlist function that matches Python API +pub async fn delete_playlist(pool: &DatabasePool, _config: &Config, playlist_data: &crate::models::DeletePlaylistRequest) -> AppResult<()> { + pool.delete_playlist(playlist_data.user_id, playlist_data.playlist_id).await } \ No newline at end of file diff --git a/rust-api/src/handlers/auth.rs b/rust-api/src/handlers/auth.rs index 01b6385e..74de3b72 100644 --- a/rust-api/src/handlers/auth.rs +++ b/rust-api/src/handlers/auth.rs @@ -1,7 +1,7 @@ use axum::{ extract::{Path, Query, State}, http::{HeaderMap, StatusCode}, - response::Json, + response::{Json, Html, IntoResponse}, }; use serde::{Deserialize, Serialize}; use serde_json::json; @@ -9,15 +9,14 @@ use base64::{Engine as _, engine::general_purpose::STANDARD}; use crate::{ error::{AppError, AppResult}, - handlers::{extract_api_key, validate_api_key, check_user_or_admin_access}, - services::auth::{hash_password, verify_password}, - database::{SelfServiceStatus, PublicOidcProvider}, + handlers::{extract_api_key, check_user_or_admin_access}, AppState, }; use std::collections::HashMap; use std::sync::{Arc, Mutex}; use std::time::{SystemTime, UNIX_EPOCH}; + // Global storage for password-verified sessions pending MFA // Key: session_token, Value: (user_id, timestamp) lazy_static::lazy_static! 
{ @@ -132,6 +131,29 @@ pub struct OPMLImportRequest { pub user_id: i32, } +#[derive(Deserialize)] +pub struct ResetCodeRequest { + pub email: String, + pub username: String, +} + +#[derive(Serialize)] +pub struct ResetCodeResponse { + pub code_created: bool, +} + +#[derive(Deserialize)] +pub struct VerifyAndResetPasswordRequest { + pub reset_code: String, + pub email: String, + pub new_password: String, +} + +#[derive(Serialize)] +pub struct VerifyAndResetPasswordResponse { + pub message: String, +} + #[derive(Serialize)] pub struct CreateFirstAdminResponse { pub message: String, @@ -183,6 +205,11 @@ pub async fn get_key( headers: HeaderMap, State(state): State, ) -> Result, AppError> { + // Check if standard login is disabled in favor of OIDC-only authentication + if state.config.oidc.disable_standard_login { + return Err(AppError::forbidden("Standard username/password login is disabled. Please use OIDC authentication.")); + } + let (username, password) = extract_basic_auth(&headers)?; // Verify password @@ -202,10 +229,10 @@ pub async fn get_key( // Generate cryptographically secure session token use rand::Rng; const CHARSET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; - let mut rng = rand::thread_rng(); + let mut rng = rand::rng(); let session_token: String = (0..32) .map(|_| { - let idx = rng.gen_range(0..CHARSET.len()); + let idx = rng.random_range(0..CHARSET.len()); CHARSET[idx] as char }) .collect(); @@ -363,6 +390,12 @@ pub async fn create_first_admin( // Don't fail the admin creation if news feed addition fails } + // Create default playlists for the new admin user + if let Err(e) = state.db_pool.create_missing_default_playlists().await { + eprintln!("Failed to create default playlists during first admin creation: {}", e); + // Don't fail the admin creation if playlist creation fails + } + Ok(Json(CreateFirstAdminResponse { message: "Admin user created successfully".to_string(), user_id, @@ -846,7 +879,7 @@ async fn 
process_opml_import( ).await; // Try to get podcast values and add podcast with robust error handling - match get_podcast_values_from_url(podcast_url).await { + match get_podcast_values_from_url(podcast_url, &db_pool).await { Ok(mut podcast_values) => { podcast_values.user_id = import_request.user_id; match db_pool.add_podcast(&podcast_values, 0, None, None).await { @@ -879,212 +912,169 @@ async fn process_opml_import( } // Get podcast values from URL - simplified version of Python get_podcast_values -async fn get_podcast_values_from_url(url: &str) -> Result { - use std::collections::HashMap; +async fn get_podcast_values_from_url(url: &str, db_pool: &crate::database::DatabasePool) -> Result { + // Use the same feed-rs based parsing that manual add uses (which works correctly) + // This avoids the ampersand truncation issue in the custom quick_xml parser - let client = reqwest::Client::new(); - let response = client.get(url) - .header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36") - .send() - .await - .map_err(|e| AppError::Http(e))?; - - let content = response.text().await.map_err(|e| AppError::Http(e))?; - - // Parse RSS feed to extract podcast information with Python-style comprehensive fallbacks - use quick_xml::Reader; - use quick_xml::events::Event; - - let mut reader = Reader::from_str(&content); - reader.config_mut().trim_text(true); - - let mut metadata: HashMap = HashMap::new(); - let mut current_tag = String::new(); - let mut current_text = String::new(); - let mut current_attrs: HashMap = HashMap::new(); - let mut in_channel = false; - let mut categories: HashMap = HashMap::new(); - let mut category_counter = 0; - - loop { - match reader.read_event() { - Ok(Event::Start(ref e)) => { - current_tag = String::from_utf8_lossy(e.name().as_ref()).to_string(); - current_text.clear(); - current_attrs.clear(); - - // Track when we're in the channel section (not in items) - if 
current_tag == "channel" { - in_channel = true; - } else if current_tag == "item" { - in_channel = false; - } - - // Store attributes - for attr in e.attributes() { - if let Ok(attr) = attr { - let key = String::from_utf8_lossy(attr.key.as_ref()).to_string(); - let value = String::from_utf8_lossy(&attr.value).to_string(); - current_attrs.insert(key, value); - } - } - - // Handle iTunes image with href attribute (priority for artwork) - if (current_tag == "itunes:image" || current_tag == "image") && in_channel { - if let Some(href) = current_attrs.get("href") { - if !href.trim().is_empty() { - metadata.insert("itunes_image_href".to_string(), href.clone()); - } - } - } - - // Handle iTunes category attributes - if current_tag == "itunes:category" && in_channel { - if let Some(text) = current_attrs.get("text") { - categories.insert(category_counter.to_string(), text.clone()); - category_counter += 1; - } - } - } - Ok(Event::Empty(ref e)) => { - // Handle self-closing tags - current_tag = String::from_utf8_lossy(e.name().as_ref()).to_string(); - current_attrs.clear(); - - // Store attributes from self-closing tag - for attr in e.attributes() { - if let Ok(attr) = attr { - let key = String::from_utf8_lossy(attr.key.as_ref()).to_string(); - let value = String::from_utf8_lossy(&attr.value).to_string(); - current_attrs.insert(key, value); - } - } - - // Handle iTunes image with href attribute - if (current_tag == "itunes:image" || current_tag == "image") && in_channel { - if let Some(href) = current_attrs.get("href") { - if !href.trim().is_empty() { - metadata.insert("itunes_image_href".to_string(), href.clone()); - } - } - } - - // Handle iTunes category attributes - if current_tag == "itunes:category" && in_channel { - if let Some(text) = current_attrs.get("text") { - categories.insert(category_counter.to_string(), text.clone()); - category_counter += 1; - } - } - } - Ok(Event::Text(e)) => { - current_text = e.decode().unwrap_or_default().into_owned(); - } - 
Ok(Event::CData(e)) => { - current_text = e.decode().unwrap_or_default().into_owned(); - } - Ok(Event::End(ref e)) => { - let tag = String::from_utf8_lossy(e.name().as_ref()).to_string(); - - // Only store channel-level metadata, not item-level - if in_channel && !current_text.trim().is_empty() { - metadata.insert(tag.clone(), current_text.clone()); - } - } - Ok(Event::Eof) => break, - Err(_) => break, - _ => {} - } - } + // Use the working get_podcast_values function that manual add uses + let podcast_values_map = db_pool.get_podcast_values(url, 0, None, None).await + .map_err(|e| AppError::internal(&format!("Failed to parse podcast feed: {}", e)))?; - // Apply Python-style comprehensive fallback logic for each field - - // Title - required field with robust fallbacks - let podcast_title = metadata.get("title") - .filter(|s| !s.trim().is_empty()) - .cloned() - .unwrap_or_else(|| "Unknown Podcast".to_string()); - - // Author - multiple fallback sources like Python version - let podcast_author = metadata.get("itunes:author") - .or_else(|| metadata.get("author")) - .or_else(|| metadata.get("managingEditor")) - .or_else(|| metadata.get("dc:creator")) - .filter(|s| !s.trim().is_empty()) - .cloned() - .unwrap_or_else(|| "Unknown Author".to_string()); - - // Artwork - comprehensive fallback chain like Python version - let podcast_artwork = metadata.get("itunes_image_href") - .or_else(|| metadata.get("image_href")) - .or_else(|| metadata.get("url")) // From tags - .or_else(|| metadata.get("href")) // From attributes - .filter(|s| !s.trim().is_empty() && s.starts_with("http")) - .cloned() - .unwrap_or_else(|| String::new()); - - // Description - multiple fallback sources like Python version - let podcast_description = metadata.get("itunes:summary") - .or_else(|| metadata.get("description")) - .or_else(|| metadata.get("subtitle")) - .or_else(|| metadata.get("itunes:subtitle")) - .filter(|s| !s.trim().is_empty()) - .cloned() - .unwrap_or_else(|| "No description 
available".to_string()); - - // Website - link field - let podcast_website = metadata.get("link") - .filter(|s| !s.trim().is_empty() && s.starts_with("http")) - .cloned() - .unwrap_or_else(|| String::new()); - - // Explicit - handle both string and boolean values like Python - let podcast_explicit = metadata.get("itunes:explicit") - .map(|s| { - let lower = s.to_lowercase(); - lower == "yes" || lower == "true" || lower == "explicit" || lower == "1" - }) - .unwrap_or(false); + println!("🎙️ Parsed podcast: title='{}', author='{}', description_len={}", + podcast_values_map.get("podcastname").unwrap_or(&"Unknown".to_string()), + podcast_values_map.get("author").unwrap_or(&"Unknown".to_string()), + podcast_values_map.get("description").unwrap_or(&"".to_string()).len()); - println!("🎙️ Parsed podcast: title='{}', author='{}', artwork='{}', description_len={}, website='{}', explicit={}, categories_count={}", - podcast_title, podcast_author, podcast_artwork, podcast_description.len(), podcast_website, podcast_explicit, categories.len()); + // Convert HashMap to PodcastValues struct + let categories: std::collections::HashMap = + serde_json::from_str(podcast_values_map.get("categories").unwrap_or(&"{}".to_string())) + .unwrap_or_default(); Ok(crate::handlers::podcasts::PodcastValues { - pod_title: podcast_title, - pod_artwork: podcast_artwork, - pod_author: podcast_author, + pod_title: podcast_values_map.get("podcastname").unwrap_or(&"Unknown Podcast".to_string()).clone(), + pod_artwork: podcast_values_map.get("artworkurl").unwrap_or(&"".to_string()).clone(), + pod_author: podcast_values_map.get("author").unwrap_or(&"Unknown Author".to_string()).clone(), categories: categories, - pod_description: podcast_description, - pod_episode_count: 0, + pod_description: podcast_values_map.get("description").unwrap_or(&"No description available".to_string()).clone(), + pod_episode_count: podcast_values_map.get("episodecount").unwrap_or(&"0".to_string()).parse().unwrap_or(0), 
pod_feed_url: url.to_string(), - pod_website: podcast_website, - pod_explicit: podcast_explicit, + pod_website: podcast_values_map.get("websiteurl").unwrap_or(&"".to_string()).clone(), + pod_explicit: podcast_values_map.get("explicit").unwrap_or(&"False".to_string()) == "True", user_id: 0, // Will be set by the caller }) } // OIDC Authentication Flow Endpoints -// Store OIDC state - matches Python /api/auth/store_state endpoint +// Store OIDC state - enhanced to capture user's current URL #[derive(Deserialize)] pub struct StoreStateRequest { pub state: String, pub client_id: String, + pub origin_url: Option, // URL user was on when they clicked OIDC login + pub code_verifier: Option, // PKCE code verifier for token exchange +} + +#[derive(Serialize, Deserialize)] +struct StoredOidcState { + client_id: String, + origin_url: Option, + code_verifier: Option, // PKCE code verifier } pub async fn store_oidc_state( State(state): State, Json(request): Json, ) -> Result, AppError> { - // Store state in Redis with 10-minute expiration (matches Python implementation) + // Store state in Redis with 10-minute expiration let state_key = format!("oidc_state:{}", request.state); - state.redis_client.set_ex(&state_key, &request.client_id, 600).await + let stored_state = StoredOidcState { + client_id: request.client_id, + origin_url: request.origin_url, + code_verifier: request.code_verifier, + }; + + let state_json = serde_json::to_string(&stored_state) + .map_err(|e| AppError::internal(&format!("Failed to serialize OIDC state: {}", e)))?; + + state.redis_client.set_ex(&state_key, &state_json, 600).await .map_err(|e| AppError::internal(&format!("Failed to store OIDC state: {}", e)))?; Ok(Json(serde_json::json!({ "status": "success" }))) } +// Helper function to create proper redirect URLs for both web and mobile +fn create_oidc_redirect_url(frontend_base: &str, params: &str) -> String { + let redirect_url = if frontend_base.starts_with("pinepods://") { + // Mobile deep link - 
append params directly to the exact URL + if frontend_base.contains('?') { + // URL already has query params, append with & + format!("{}&{}", frontend_base, params) + } else { + // URL has no query params, append with ? + format!("{}?{}", frontend_base, params) + } + } else { + // Web callback - use traditional path + format!("{}/oauth/callback?{}", frontend_base, params) + }; + redirect_url +} + +// Helper function to create appropriate response for mobile vs web +fn create_oidc_response(frontend_base: &str, params: &str) -> axum::response::Response { + let redirect_url = create_oidc_redirect_url(frontend_base, params); + + if frontend_base.starts_with("pinepods://") { + // Mobile deep link - return HTML page with JavaScript redirect + let html_content = format!(r#" + + + PinePods Authentication + + + + + +
+ +
Authentication successful!
+
Redirecting to app...
+
+ + +"#, redirect_url); + + Html(html_content).into_response() + } else { + // Web callback - use normal redirect + axum::response::Redirect::to(&redirect_url).into_response() + } +} + // OIDC callback handler - matches Python /api/auth/callback endpoint #[derive(Deserialize)] pub struct OIDCCallbackQuery { @@ -1098,32 +1088,64 @@ pub async fn oidc_callback( State(state): State, headers: HeaderMap, Query(query): Query, -) -> Result { +) -> Result { // Construct base URL from request like Python version - EXACT match let base_url = construct_base_url_from_request(&headers)?; - let frontend_base = base_url.replace("/api", ""); + let default_frontend_base = base_url.replace("/api", ""); // Handle OAuth errors first - EXACT match to Python if let Some(error) = query.error { let error_desc = query.error_description.unwrap_or_else(|| "Unknown error".to_string()); - tracing::error!("OIDC provider error: {} - {}", error, error_desc); - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=provider_error&description={}", - frontend_base, urlencoding::encode(&error_desc)))); + tracing::error!("OIDC: Provider error: {} - {}", error, error_desc); + return Ok(create_oidc_response(&default_frontend_base, &format!("error=provider_error&description={}", urlencoding::encode(&error_desc)))); } // Validate required parameters - EXACT match to Python let auth_code = query.code.ok_or_else(|| AppError::bad_request("Missing authorization code"))?; let state_param = query.state.ok_or_else(|| AppError::bad_request("Missing state parameter"))?; - // Get client_id from state - EXACT match to Python oidc_state_manager.get_client_id - let client_id = match state.redis_client.get_del(&format!("oidc_state:{}", state_param)).await { - Ok(Some(client_id)) => client_id, + // Get client_id, origin_url, and code_verifier from state + let (client_id, stored_origin_url, code_verifier) = match state.redis_client.get_del(&format!("oidc_state:{}", state_param)).await { + 
Ok(Some(state_json)) => { + // Try to parse as new JSON format first + if let Ok(stored_state) = serde_json::from_str::(&state_json) { + tracing::info!("OIDC: Retrieved state for client_id={}", stored_state.client_id); + (stored_state.client_id, stored_state.origin_url, stored_state.code_verifier) + } else { + // Fallback to old format (just client_id string) for backwards compatibility + (state_json, None, None) + } + }, Ok(None) => { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=invalid_state", frontend_base))); + return Ok(create_oidc_response(&default_frontend_base, "error=invalid_state")); } Err(_) => { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=internal_error", frontend_base))); + return Ok(create_oidc_response(&default_frontend_base, "error=internal_error")); + } + }; + + // Use stored origin URL if available, otherwise fall back to constructed URL + let frontend_base = if let Some(ref origin_url) = stored_origin_url { + // Check if this is a mobile deep link callback + if origin_url.starts_with("pinepods://auth/callback") { + // For mobile deep links, use the full URL directly - don't try to parse as HTTP + origin_url.clone() + } else { + // Extract just the base part (scheme + host + port) from the stored origin URL for web + // Simple string parsing to avoid adding url dependency + if let Some(protocol_end) = origin_url.find("://") { + let after_protocol = &origin_url[protocol_end + 3..]; + if let Some(path_start) = after_protocol.find('/') { + origin_url[..protocol_end + 3 + path_start].to_string() + } else { + origin_url.clone() + } + } else { + origin_url.clone() + } } + } else { + default_frontend_base.clone() }; let registered_redirect_uri = format!("{}/api/auth/callback", base_url); @@ -1132,10 +1154,10 @@ pub async fn oidc_callback( let provider_tuple = match state.db_pool.get_oidc_provider(&client_id).await { Ok(Some(provider)) => provider, Ok(None) => { - return 
Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=invalid_provider", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=invalid_provider")); } Err(_) => { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=internal_error", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=internal_error")); } }; @@ -1144,30 +1166,55 @@ pub async fn oidc_callback( // Exchange authorization code for access token - EXACT match to Python let client = reqwest::Client::new(); + let mut form_data = vec![ + ("grant_type", "authorization_code"), + ("code", &auth_code), + ("redirect_uri", ®istered_redirect_uri), + ("client_id", &client_id), + ("client_secret", &client_secret), + ]; + + // Add PKCE code verifier if present + if let Some(ref verifier) = code_verifier { + form_data.push(("code_verifier", verifier)); + tracing::info!("OIDC: Using PKCE flow"); + } + let token_response = match client.post(&token_url) - .form(&[ - ("grant_type", "authorization_code"), - ("code", &auth_code), - ("redirect_uri", ®istered_redirect_uri), - ("client_id", &client_id), - ("client_secret", &client_secret), - ]) + .form(&form_data) .header("Accept", "application/json") .send() .await { - Ok(response) if response.status().is_success() => { - match response.json::().await { - Ok(token_data) => token_data, - Err(_) => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=token_exchange_failed", frontend_base))), + Ok(response) => { + let status = response.status(); + + if status.is_success() { + match response.json::().await { + Ok(token_data) => { + tracing::info!("OIDC: Token exchange successful"); + token_data + }, + Err(e) => { + tracing::error!("OIDC: Failed to parse token response JSON: {}", e); + return Ok(create_oidc_response(&frontend_base, "error=token_exchange_failed")); + } + } + } else { + let error_text = response.text().await.unwrap_or_else(|_| "Unknown error".to_string()); + 
tracing::error!("OIDC: Token exchange failed with status {}: {}", status, error_text); + return Ok(create_oidc_response(&frontend_base, "error=token_exchange_failed")); } } - _ => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=token_exchange_failed", frontend_base))), + Err(e) => { + tracing::error!("OIDC: Token exchange request failed: {}", e); + return Ok(create_oidc_response(&frontend_base, "error=token_exchange_failed")); + } }; let access_token = match token_response.get("access_token").and_then(|v| v.as_str()) { Some(token) => token, - None => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=token_exchange_failed", frontend_base))), + None => return Ok(create_oidc_response(&frontend_base, "error=token_exchange_failed")), }; // Get user info from OIDC provider - EXACT match to Python @@ -1181,14 +1228,21 @@ pub async fn oidc_callback( Ok(response) if response.status().is_success() => { match response.json::().await { Ok(user_info) => user_info, - Err(_) => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=userinfo_failed", frontend_base))), + Err(_) => return Ok(create_oidc_response(&frontend_base, "error=userinfo_failed")), } } - _ => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=userinfo_failed", frontend_base))), + _ => return Ok(create_oidc_response(&frontend_base, "error=userinfo_failed")), }; // Extract email with GitHub special handling - EXACT match to Python - let mut email = userinfo_response.get(email_claim.as_deref().unwrap_or("email")).and_then(|v| v.as_str()).map(|s| s.to_string()); + let email_field = email_claim + .as_deref() + .filter(|s| !s.is_empty()) + .unwrap_or("email"); + + tracing::info!("OIDC Debug - email_claim: {:?}, email_field: {}, userinfo_response: {:?}", email_claim, email_field, userinfo_response); + + let mut email = userinfo_response.get(email_field).and_then(|v| v.as_str()).map(|s| s.to_string()); // GitHub email handling - 
EXACT match to Python if email.is_none() && userinfo_url.contains("api.github.com") { @@ -1225,11 +1279,11 @@ pub async fn oidc_callback( let email = match email { Some(e) => e, - None => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=email_required", frontend_base))), + None => return Ok(create_oidc_response(&frontend_base, "error=email_required")), }; // Role verification - EXACT match to Python - if let (Some(roles_claim), Some(user_role)) = (roles_claim.as_ref(), user_role.as_ref()) { + if let (Some(roles_claim), Some(user_role)) = (roles_claim.as_ref().filter(|s| !s.is_empty()), user_role.as_ref().filter(|s| !s.is_empty())) { if let Some(roles) = userinfo_response.get(roles_claim).and_then(|v| v.as_array()) { let has_user_role = roles.iter().any(|r| r.as_str() == Some(user_role)); let has_admin_role = admin_role.as_ref().map_or(false, |admin_role| { @@ -1237,29 +1291,37 @@ pub async fn oidc_callback( }); if !has_user_role && !has_admin_role { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=no_access", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=no_access")); } } else { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=no_access&details=invalid_roles", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=no_access&details=invalid_roles")); } } // Check if user exists - EXACT match to Python let existing_user = state.db_pool.get_user_by_email(&email).await?; - let fullname = userinfo_response.get(name_claim.as_deref().unwrap_or("name")) + let name_field = name_claim + .as_deref() + .filter(|s| !s.is_empty()) + .unwrap_or("name"); + let fullname = userinfo_response.get(name_field) .and_then(|v| v.as_str()) .unwrap_or("") .to_string(); // Username claim validation - EXACT match to Python - if let Some(username_claim) = username_claim.as_ref() { + if let Some(username_claim) = username_claim.as_ref().filter(|s| !s.is_empty()) { if 
!userinfo_response.get(username_claim).is_some() { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=user_creation_failed&details=username_claim_missing", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=user_creation_failed&details=username_claim_missing")); } } - let username = userinfo_response.get(username_claim.as_deref().unwrap_or("preferred_username")) + let username_field = username_claim + .as_deref() + .filter(|s| !s.is_empty()) + .unwrap_or("preferred_username"); + let username = userinfo_response.get(username_field) .and_then(|v| v.as_str()) .map(|s| s.to_string()); @@ -1274,7 +1336,7 @@ pub async fn oidc_callback( state.db_pool.set_fullname(user_id, &fullname).await?; // Update username if changed - EXACT match to Python - if let (Some(username_claim), Some(new_username)) = (username_claim.as_ref(), username.as_ref()) { + if let (Some(username_claim), Some(new_username)) = (username_claim.as_ref().filter(|s| !s.is_empty()), username.as_ref()) { if Some(new_username) != current_username.as_ref() { if !state.db_pool.check_usernames(new_username).await? 
{ state.db_pool.set_username(user_id, new_username).await?; @@ -1283,14 +1345,15 @@ pub async fn oidc_callback( } // Update admin role - EXACT match to Python - if let (Some(roles_claim), Some(admin_role)) = (roles_claim.as_ref(), admin_role.as_ref()) { + if let (Some(roles_claim), Some(admin_role)) = (roles_claim.as_ref().filter(|s| !s.is_empty()), admin_role.as_ref().filter(|s| !s.is_empty())) { if let Some(roles) = userinfo_response.get(roles_claim).and_then(|v| v.as_array()) { let is_admin = roles.iter().any(|r| r.as_str() == Some(admin_role)); state.db_pool.set_isadmin(user_id, is_admin).await?; } } - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?api_key={}", frontend_base, api_key))); + tracing::info!("OIDC: Login successful for existing user"); + return Ok(create_oidc_response(&frontend_base, &format!("api_key={}", api_key))); } else { // Create new user - EXACT match to Python let mut final_username = username.unwrap_or_else(|| email.split('@').next().unwrap_or(&email).to_lowercase()); @@ -1308,7 +1371,7 @@ pub async fn oidc_callback( } counter += 1; if counter > MAX_ATTEMPTS { - return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=username_conflict", frontend_base))); + return Ok(create_oidc_response(&frontend_base, "error=username_conflict")); } } } @@ -1319,7 +1382,7 @@ pub async fn oidc_callback( let api_key = state.db_pool.create_api_key(user_id).await?; // Set admin role for new user - EXACT match to Python - if let (Some(roles_claim), Some(admin_role)) = (roles_claim.as_ref(), admin_role.as_ref()) { + if let (Some(roles_claim), Some(admin_role)) = (roles_claim.as_ref().filter(|s| !s.is_empty()), admin_role.as_ref().filter(|s| !s.is_empty())) { if let Some(roles) = userinfo_response.get(roles_claim).and_then(|v| v.as_array()) { let is_admin = roles.iter().any(|r| r.as_str() == Some(admin_role)); state.db_pool.set_isadmin(user_id, is_admin).await?; @@ -1328,7 +1391,7 @@ pub async fn oidc_callback( user_id } - 
Err(_) => return Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?error=user_creation_failed", frontend_base))), + Err(_) => return Ok(create_oidc_response(&frontend_base, "error=user_creation_failed")), } }; @@ -1337,8 +1400,9 @@ pub async fn oidc_callback( None => state.db_pool.create_api_key(user_id).await?, }; - // Success - EXACT match to Python - Ok(axum::response::Redirect::to(&format!("{}/oauth/callback?api_key={}", frontend_base, api_key))) + // Success - handle both web and mobile redirects + tracing::info!("OIDC: Login successful for new user"); + Ok(create_oidc_response(&frontend_base, &format!("api_key={}", api_key))) } // Update user timezone @@ -1501,6 +1565,86 @@ pub async fn update_auto_complete_seconds( } } +// Password reset endpoint - Always returns success for security (prevents username enumeration) +pub async fn reset_password_create_code( + State(state): State, + Json(request): Json, +) -> Result, AppError> { + // Check if standard login is disabled in favor of OIDC-only authentication + if state.config.oidc.disable_standard_login { + return Err(AppError::forbidden("Password reset is disabled when using OIDC-only authentication. 
Please use your OIDC provider for password management.")); + } + // Get email settings to check if they're configured + let email_settings = state.db_pool.get_email_settings().await?; + if let Some(settings) = email_settings { + if settings.server_name == "default_server" { + // Even if email isn't configured, return success to prevent enumeration + return Ok(Json(ResetCodeResponse { code_created: true })); + } + + // Check if user exists with given username and email + let user_exists = state.db_pool.check_reset_user(&request.username.to_lowercase(), &request.email).await.unwrap_or(false); + + if user_exists { + // Create password reset code only if user exists + if let Ok(Some(code)) = state.db_pool.reset_password_create_code(&request.email).await { + // Create email payload + let email_request = crate::handlers::settings::SendEmailRequest { + to_email: request.email.clone(), + subject: "Pinepods Password Reset Code".to_string(), + message: format!("Your password reset code is {}", code), + }; + + // Try to send the email - if it fails, silently remove the reset code + if crate::handlers::settings::send_email_with_settings(&settings, &email_request).await.is_err() { + let _ = state.db_pool.reset_password_remove_code(&request.email).await; + } + } + } + + // Always return success regardless of user existence or email sending result + Ok(Json(ResetCodeResponse { code_created: true })) + } else { + // Always return success even if email settings aren't configured + Ok(Json(ResetCodeResponse { code_created: true })) + } +} + +// Verify reset code and reset password endpoint - matches Python api_verify_and_reset_password_route exactly +pub async fn verify_and_reset_password( + State(state): State, + Json(request): Json, +) -> Result, AppError> { + // Check if standard login is disabled in favor of OIDC-only authentication + if state.config.oidc.disable_standard_login { + return Err(AppError::forbidden("Password reset is disabled when using OIDC-only authentication. 
Please use your OIDC provider for password management.")); + } + // Verify the reset code + let code_valid = state.db_pool.verify_reset_code(&request.email, &request.reset_code).await?; + + match code_valid { + None => { + // User not found + return Err(AppError::not_found("User not found")); + } + Some(false) => { + // Code is invalid or expired + return Err(AppError::bad_request("Code is invalid")); + } + Some(true) => { + // Code is valid, proceed with password reset + } + } + + // Reset the password (the new_password should already be hashed by the frontend) + let message = state.db_pool.reset_password_prompt(&request.email, &request.new_password).await?; + + match message { + Some(msg) => Ok(Json(VerifyAndResetPasswordResponse { message: msg })), + None => Err(AppError::internal("Failed to reset password")), + } +} + // Construct base URL from request headers (matches Python request.base_url) fn construct_base_url_from_request(headers: &HeaderMap) -> Result { // Get Host header (required) @@ -1517,6 +1661,14 @@ fn construct_base_url_from_request(headers: &HeaderMap) -> Result 0 { Some(failed_count) } else { None }, })) +} + +// Share episode - creates a shareable URL that expires in 60 days +pub async fn share_episode( + State(state): State, + axum::extract::Path(episode_id): axum::extract::Path, + headers: HeaderMap, +) -> AppResult> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Get the user ID from the API key + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Generate unique share code and expiration date + let share_code = uuid::Uuid::new_v4().to_string(); + let expiration_date = chrono::Utc::now() + chrono::Duration::days(60); + + // Insert the shared episode entry + let result = state.db_pool + .add_shared_episode(episode_id, user_id, &share_code, expiration_date) + .await?; + + if result { + Ok(Json(serde_json::json!({ "url_key": share_code }))) + } else { + 
Err(AppError::internal("Failed to share episode")) + } +} + +// Get episode by URL key - for accessing shared episodes +pub async fn get_episode_by_url_key( + State(state): State, + axum::extract::Path(url_key): axum::extract::Path, +) -> AppResult> { + // Find the episode ID associated with the URL key + let episode_id = match state.db_pool.get_episode_id_by_share_code(&url_key).await? { + Some(id) => id, + None => return Err(AppError::not_found("Invalid or expired URL key")), + }; + + // Now retrieve the episode metadata using the special shared episode method + // This bypasses user restrictions for public shared access + let episode_data = state.db_pool + .get_shared_episode_metadata(episode_id) + .await?; + + Ok(Json(serde_json::json!({ "episode": episode_data }))) +} + +// Download episode file with metadata +pub async fn download_episode_file( + State(state): State, + axum::extract::Path(episode_id): axum::extract::Path, + headers: HeaderMap, + axum::extract::Query(params): axum::extract::Query>, +) -> AppResult { + // Try to get API key from header first, then from query parameter + let api_key = if let Ok(key) = extract_api_key(&headers) { + key + } else if let Some(key) = params.get("api_key") { + key.clone() + } else { + return Err(AppError::unauthorized("API key is required")); + }; + + validate_api_key(&state, &api_key).await?; + + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Get episode metadata + let episode_info = match &state.db_pool { + crate::database::DatabasePool::Postgres(pool) => { + let row = sqlx::query(r#" + SELECT e."episodeurl", e."episodetitle", p."podcastname", + e."episodepubdate", p."author", e."episodeartwork", p."artworkurl", + e."episodedescription" + FROM "Episodes" e + JOIN "Podcasts" p ON e."podcastid" = p."podcastid" + WHERE e."episodeid" = $1 + "#) + .bind(episode_id) + .fetch_one(pool) + .await?; + + ( + row.try_get::("episodeurl")?, + row.try_get::("episodetitle")?, + 
row.try_get::("podcastname")?, + row.try_get::, _>("episodepubdate")?, + row.try_get::, _>("author")?, + row.try_get::, _>("episodeartwork")?, + row.try_get::, _>("artworkurl")?, + row.try_get::, _>("episodedescription")? + ) + } + crate::database::DatabasePool::MySQL(pool) => { + let row = sqlx::query(" + SELECT e.EpisodeURL, e.EpisodeTitle, p.PodcastName, + e.EpisodePubDate, p.Author, e.EpisodeArtwork, p.ArtworkURL, + e.EpisodeDescription + FROM Episodes e + JOIN Podcasts p ON e.PodcastID = p.PodcastID + WHERE e.EpisodeID = ? + ") + .bind(episode_id) + .fetch_one(pool) + .await?; + + ( + row.try_get::("EpisodeURL")?, + row.try_get::("EpisodeTitle")?, + row.try_get::("PodcastName")?, + row.try_get::, _>("EpisodePubDate")?, + row.try_get::, _>("Author")?, + row.try_get::, _>("EpisodeArtwork")?, + row.try_get::, _>("ArtworkURL")?, + row.try_get::, _>("EpisodeDescription")? + ) + } + }; + + let (episode_url, episode_title, podcast_name, pub_date, author, episode_artwork, artwork_url, _description) = episode_info; + + // Download the episode file + let client = reqwest::Client::new(); + let response = client.get(&episode_url) + .send() + .await + .map_err(|e| AppError::internal(&format!("Failed to download episode: {}", e)))?; + + if !response.status().is_success() { + return Err(AppError::internal(&format!("Server returned error: {}", response.status()))); + } + + let audio_bytes = response.bytes() + .await + .map_err(|e| AppError::internal(&format!("Failed to download audio content: {}", e)))?; + + // Create a temporary file for metadata processing + let temp_dir = std::env::temp_dir(); + let temp_filename = format!("episode_{}_{}_{}.mp3", episode_id, user_id, chrono::Utc::now().timestamp()); + let temp_path = temp_dir.join(&temp_filename); + + // Write audio content to temp file + std::fs::write(&temp_path, &audio_bytes) + .map_err(|e| AppError::internal(&format!("Failed to write temp file: {}", e)))?; + + // Add metadata using the same function as server downloads 
+ if let Err(e) = add_podcast_metadata( + &temp_path, + &episode_title, + author.as_deref().unwrap_or("Unknown"), + &podcast_name, + pub_date.as_ref(), + episode_artwork.as_deref().or(artwork_url.as_deref()) + ).await { + tracing::warn!("Failed to add metadata to downloaded episode: {}", e); + } + + // Read the file with metadata back + let final_bytes = std::fs::read(&temp_path) + .map_err(|e| AppError::internal(&format!("Failed to read processed file: {}", e)))?; + + // Clean up temp file + let _ = std::fs::remove_file(&temp_path); + + // Create safe filename for download + let safe_episode_title = episode_title.chars() + .map(|c| if c.is_alphanumeric() || c == ' ' || c == '-' || c == '_' { c } else { '_' }) + .collect::() + .trim() + .to_string(); + + let safe_podcast_name = podcast_name.chars() + .map(|c| if c.is_alphanumeric() || c == ' ' || c == '-' || c == '_' { c } else { '_' }) + .collect::() + .trim() + .to_string(); + + let pub_date_str = if let Some(date) = pub_date { + date.format("%Y-%m-%d").to_string() + } else { + chrono::Utc::now().format("%Y-%m-%d").to_string() + }; + + let filename = format!("{}_{}_-_{}.mp3", pub_date_str, safe_podcast_name, safe_episode_title); + + // Return the file with appropriate headers + let response = Response::builder() + .status(StatusCode::OK) + .header(header::CONTENT_TYPE, "audio/mpeg") + .header(header::CONTENT_DISPOSITION, format!("attachment; filename=\"{}\"", filename)) + .header(header::CONTENT_LENGTH, final_bytes.len()) + .body(axum::body::Body::from(final_bytes)) + .map_err(|e| AppError::internal(&format!("Failed to create response: {}", e)))?; + + Ok(response) +} + +// Function to add metadata to downloaded MP3 files (copied from tasks.rs) +async fn add_podcast_metadata( + file_path: &std::path::Path, + title: &str, + artist: &str, + album: &str, + date: Option<&chrono::NaiveDateTime>, + artwork_url: Option<&str>, +) -> Result<(), Box> { + use id3::TagLike; // Import the trait to use methods + use 
chrono::Datelike; // For year(), month(), day() methods + + // Create ID3 tag and add basic metadata + let mut tag = id3::Tag::new(); + tag.set_title(title); + tag.set_artist(artist); + tag.set_album(album); + + // Set date if available + if let Some(date) = date { + tag.set_date_recorded(id3::Timestamp { + year: date.year(), + month: Some(date.month() as u8), + day: Some(date.day() as u8), + hour: None, + minute: None, + second: None, + }); + } + + // Add genre for podcasts + tag.set_genre("Podcast"); + + // Download and add artwork if available + if let Some(artwork_url) = artwork_url { + if let Ok(artwork_data) = download_artwork(artwork_url).await { + // Determine MIME type based on the data + let mime_type = if artwork_data.starts_with(&[0xFF, 0xD8, 0xFF]) { + "image/jpeg" + } else if artwork_data.starts_with(&[0x89, 0x50, 0x4E, 0x47]) { + "image/png" + } else { + "image/jpeg" // Default fallback + }; + + tag.add_frame(id3::frame::Picture { + mime_type: mime_type.to_string(), + picture_type: id3::frame::PictureType::CoverFront, + description: "Cover".to_string(), + data: artwork_data, + }); + } + } + + // Write the tag to the file + tag.write_to_path(file_path, id3::Version::Id3v24)?; + + Ok(()) +} + +// Helper function to download artwork (copied from tasks.rs) +async fn download_artwork(url: &str) -> Result, Box> { + let client = reqwest::Client::new(); + let response = client + .get(url) + .header("User-Agent", "PinePods/1.0") + .send() + .await?; + + if response.status().is_success() { + let bytes = response.bytes().await?; + // Limit artwork size to reasonable bounds (e.g., 5MB) + if bytes.len() > 5 * 1024 * 1024 { + return Err("Artwork too large".into()); + } + Ok(bytes.to_vec()) + } else { + Err(format!("Failed to download artwork: HTTP {}", response.status()).into()) + } } \ No newline at end of file diff --git a/rust-api/src/handlers/feed.rs b/rust-api/src/handlers/feed.rs index 2a45db8a..167b6306 100644 --- a/rust-api/src/handlers/feed.rs +++ 
b/rust-api/src/handlers/feed.rs @@ -1,15 +1,11 @@ use axum::{ extract::{Path, Query, State, Request}, - http::HeaderMap, response::Response, }; -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use chrono::{DateTime, Utc}; +use serde::Deserialize; use crate::{ error::AppError, - handlers::{extract_api_key, validate_api_key}, AppState, }; @@ -85,6 +81,14 @@ pub struct RssKeyInfo { } fn extract_domain_from_request(request: &Request) -> String { + // Check SERVER_URL environment variable first (includes scheme and port) + // Note: We use SERVER_URL instead of HOSTNAME because Docker automatically sets HOSTNAME to the container ID + // The startup script saves the user's HOSTNAME value to SERVER_URL before Docker overwrites it + if let Ok(server_url) = std::env::var("SERVER_URL") { + tracing::info!("Using SERVER_URL env var: {}", server_url); + return server_url; + } + // Try to get domain from Host header if let Some(host) = request.headers().get("host") { if let Ok(host_str) = host.to_str() { @@ -93,11 +97,14 @@ fn extract_domain_from_request(request: &Request) -> String { .get("x-forwarded-proto") .and_then(|h| h.to_str().ok()) .unwrap_or("http"); - - return format!("{}://{}", scheme, host_str); + + let domain = format!("{}://{}", scheme, host_str); + tracing::info!("Using Host header: {}", domain); + return domain; } } - + // Fallback + tracing::info!("Using fallback domain"); "http://localhost:8041".to_string() } \ No newline at end of file diff --git a/rust-api/src/handlers/mod.rs b/rust-api/src/handlers/mod.rs index 628d876d..2ca68a34 100644 --- a/rust-api/src/handlers/mod.rs +++ b/rust-api/src/handlers/mod.rs @@ -3,7 +3,6 @@ pub mod health; pub mod podcasts; pub mod episodes; pub mod playlists; -pub mod users; pub mod websocket; // pub mod async_tasks_examples; // File was deleted pub mod refresh; @@ -16,7 +15,7 @@ pub mod feed; // Common handler utilities use axum::{ - extract::{Query, State}, + extract::Query, http::{HeaderMap, 
StatusCode}, }; use crate::{ diff --git a/rust-api/src/handlers/playlists.rs b/rust-api/src/handlers/playlists.rs index eb589a56..e7ea0a6b 100644 --- a/rust-api/src/handlers/playlists.rs +++ b/rust-api/src/handlers/playlists.rs @@ -1,10 +1,61 @@ use axum::{extract::State, http::HeaderMap, response::Json}; -use serde_json::json; use crate::{ - error::AppResult, + database, + error::{AppError, AppResult}, handlers::{extract_api_key, validate_api_key}, + models::{CreatePlaylistRequest, CreatePlaylistResponse, DeletePlaylistRequest, DeletePlaylistResponse}, AppState, }; -// Playlist endpoints will be implemented here to match clientapi.py -// Examples: get_playlists, create_playlist, update_playlist, etc. \ No newline at end of file +pub async fn create_playlist( + State(state): State, + headers: HeaderMap, + Json(playlist_data): Json, +) -> AppResult> { + let api_key = extract_api_key(&headers)?; + let is_valid = validate_api_key(&state, &api_key).await?; + + if !is_valid { + return Err(AppError::unauthorized("Your API key is either invalid or does not have correct permission")); + } + + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if user_id != playlist_data.user_id && !is_web_key { + return Err(AppError::forbidden("You can only create playlists for yourself!")); + } + + let playlist_id = database::create_playlist(&state.db_pool, &state.config, &playlist_data).await?; + + Ok(Json(CreatePlaylistResponse { + detail: "Playlist created successfully".to_string(), + playlist_id, + })) +} + +pub async fn delete_playlist( + State(state): State, + headers: HeaderMap, + Json(playlist_data): Json, +) -> AppResult> { + let api_key = extract_api_key(&headers)?; + let is_valid = validate_api_key(&state, &api_key).await?; + + if !is_valid { + return Err(AppError::unauthorized("Your API key is either invalid or does not have correct permission")); + } + + let user_id = 
state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if user_id != playlist_data.user_id && !is_web_key { + return Err(AppError::forbidden("You can only delete your own playlists!")); + } + + database::delete_playlist(&state.db_pool, &state.config, &playlist_data).await?; + + Ok(Json(DeletePlaylistResponse { + detail: "Playlist deleted successfully".to_string(), + })) +} \ No newline at end of file diff --git a/rust-api/src/handlers/podcasts.rs b/rust-api/src/handlers/podcasts.rs index 508cd4e7..a1937d2a 100644 --- a/rust-api/src/handlers/podcasts.rs +++ b/rust-api/src/handlers/podcasts.rs @@ -5,7 +5,6 @@ use axum::{ }; use serde::{Deserialize, Serialize}; use std::collections::HashMap; -use chrono::{DateTime, Utc}; use crate::{ error::AppError, @@ -148,6 +147,28 @@ pub struct RemovePodcastResponse { pub success: bool, } +// Request struct for update_podcast_info - matches edit podcast functionality +#[derive(Deserialize)] +pub struct UpdatePodcastInfoRequest { + pub user_id: i32, + pub podcast_id: i32, + pub feed_url: Option, + pub username: Option, + pub password: Option, + pub podcast_name: Option, + pub description: Option, + pub author: Option, + pub artwork_url: Option, + pub website_url: Option, + pub podcast_index_id: Option, +} + +#[derive(Serialize)] +pub struct UpdatePodcastInfoResponse { + pub success: bool, + pub message: String, +} + // Query struct for get_podcast_details - matches Python endpoint #[derive(Deserialize)] pub struct GetPodcastDetailsQuery { @@ -220,18 +241,49 @@ pub async fn add_podcast( return Err(AppError::forbidden("You can only add podcasts for yourself!")); } - // Add podcast to database - let (podcast_id, first_episode_id) = state.db_pool.add_podcast( - &request.podcast_values, + // Re-parse feed URL using backend feed-rs parsing instead of trusting frontend data + let feed_url = &request.podcast_values.pod_feed_url; + let user_id = 
request.podcast_values.user_id; + + // Get properly parsed podcast values from feed-rs + let parsed_podcast_values = state.db_pool.get_podcast_values(feed_url, user_id, None, None).await?; + + // Convert to PodcastValues struct using backend-parsed data + let backend_podcast_values = PodcastValues { + user_id, + pod_title: parsed_podcast_values.get("podcastname").unwrap_or(&request.podcast_values.pod_title).clone(), + pod_artwork: parsed_podcast_values.get("artworkurl").unwrap_or(&"".to_string()).clone(), + pod_author: parsed_podcast_values.get("author").unwrap_or(&"".to_string()).clone(), + categories: serde_json::from_str(parsed_podcast_values.get("categories").unwrap_or(&"{}".to_string())).unwrap_or_default(), + pod_description: parsed_podcast_values.get("description").unwrap_or(&request.podcast_values.pod_description).clone(), + pod_episode_count: parsed_podcast_values.get("episodecount").unwrap_or(&"0".to_string()).parse().unwrap_or(0), + pod_feed_url: feed_url.clone(), + pod_website: parsed_podcast_values.get("websiteurl").unwrap_or(&request.podcast_values.pod_website).clone(), + pod_explicit: parsed_podcast_values.get("explicit").unwrap_or(&"False".to_string()) == "True", + }; + + // Add podcast to database immediately (without episodes) + let podcast_id = state.db_pool.add_podcast_without_episodes( + &backend_podcast_values, request.podcast_index_id.unwrap_or(0), None, // username None, // password ).await?; + // Spawn background task to add episodes + let _task_id = state.task_spawner.spawn_add_podcast_episodes_task( + podcast_id, + backend_podcast_values.pod_feed_url.clone(), + backend_podcast_values.pod_artwork.clone(), + backend_podcast_values.user_id, + None, // username + None, // password + ).await?; + Ok(Json(PodcastStatusResponse { success: true, podcast_id, - first_episode_id: first_episode_id.unwrap_or(0), + first_episode_id: 0, // Episodes will be added in background })) } @@ -766,10 +818,8 @@ pub async fn get_podcast_id( // Get podcast ID from 
database let podcast_id = state.db_pool.get_podcast_id(query.user_id, &query.podcast_feed, &query.podcast_title).await?; - // Return single podcast_id or null, matching Python behavior - let episodes = podcast_id; - - Ok(Json(serde_json::json!({ "episodes": episodes }))) + // Return podcast ID in properly named field + Ok(Json(serde_json::json!({ "podcast_id": podcast_id }))) } // Query parameters for download_episode_list @@ -1024,6 +1074,43 @@ pub async fn get_podcast_id_from_ep_name( Ok(Json(serde_json::json!({ "podcast_id": podcast_id }))) } +// Query parameters for get_episode_id_ep_name +#[derive(Deserialize)] +pub struct GetEpisodeIdFromEpNameQuery { + pub episode_title: String, + pub episode_url: String, + pub user_id: i32, + pub is_youtube: bool, +} + +// Get episode ID from episode URL - matches frontend call_get_episode_id function +pub async fn get_episode_id_ep_name( + Query(query): Query, + headers: HeaderMap, + State(state): State, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Check authorization - users can only get their own episodes or have web key access (user ID 1) + if !check_user_access(&state, &api_key, query.user_id).await? 
{ + return Err(AppError::forbidden("You can only return episode ids of your own episodes!")); + } + + // Get episode ID from URL + let episode_id = state.db_pool.get_episode_id_from_url(&query.episode_url, query.user_id).await?; + + match episode_id { + Some(id) => Ok(Json(serde_json::json!(id))), + None => Err(AppError::not_found("Episode not found")) + } +} + // Request for get_episode_metadata - matches Python EpisodeMetadata model #[derive(Deserialize)] pub struct EpisodeMetadataRequest { @@ -1306,6 +1393,37 @@ pub async fn fetch_podcasting_2_pod_data( Ok(Json(data)) } +#[derive(Deserialize)] +pub struct UpdateEpisodeDurationRequest { + pub episode_id: i32, + pub new_duration: i32, + pub is_youtube: bool, +} + +pub async fn update_episode_duration( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized( + "Your API key is either invalid or does not have correct permission", + )); + } + + state + .db_pool + .update_episode_duration(request.episode_id, request.new_duration, request.is_youtube) + .await?; + Ok(Json( + serde_json::json!({"detail": format!("Episode duration updated to {}", request.new_duration)}), + )) +} + // Request for mark_episode_completed - matches Python MarkEpisodeCompletedData #[derive(Deserialize)] pub struct MarkEpisodeCompletedRequest { @@ -1764,7 +1882,7 @@ pub struct GetPlaylistEpisodesQuery { pub playlist_id: i32, } -// Get playlist episodes - matches Python api_get_playlist_episodes function +// Get playlist episodes - UPDATED to use dynamic playlist system pub async fn get_playlist_episodes( State(state): State, headers: HeaderMap, @@ -1780,9 +1898,14 @@ pub async fn get_playlist_episodes( return Err(AppError::forbidden("You can only view your own playlist episodes!")); } - let playlist_episodes = 
state.db_pool.get_playlist_episodes(query.user_id, query.playlist_id).await?; + // Use new dynamic playlist system + let playlist_response = state.db_pool.get_playlist_episodes_dynamic( + query.playlist_id, + query.user_id + ).await?; - Ok(Json(playlist_episodes)) + // Return in format expected by frontend + Ok(Json(serde_json::to_value(playlist_response)?)) } // Get podcast details - matches Python get_podcast_details endpoint @@ -1891,26 +2014,52 @@ pub async fn stream_episode( Query(query): Query, ) -> Result { let api_key = &query.api_key; - - // Try API key validation first + println!("Stream request for episode {} with api_key {} and user_id {}", episode_id, api_key, query.user_id); + + // Try RSS key validation FIRST (RSS keys are used in RSS feeds for streaming) let mut is_valid = false; let mut is_web_key = false; let mut key_user_id = None; - - if let Ok(_) = validate_api_key(&state, api_key).await { - is_valid = true; - is_web_key = state.db_pool.is_web_key(api_key).await?; - key_user_id = Some(state.db_pool.get_user_id_from_api_key(api_key).await?); + + println!("Trying RSS key validation first"); + match state.db_pool.get_rss_key_if_valid(api_key, None).await { + Ok(Some(rss_info)) => { + println!("Valid RSS key for user {}", rss_info.user_id); + is_valid = true; + // Don't set key_user_id for RSS keys - they don't need permission checks + } + Ok(None) => { + println!("Not an RSS key, trying regular API key"); + } + Err(e) => { + println!("RSS key validation error: {}", e); + } } - - // If not a valid API key, try RSS key validation + + // If not a valid RSS key, try regular API key validation if !is_valid { - if let Ok(Some(_rss_info)) = state.db_pool.get_rss_key_if_valid(api_key, None).await { - // RSS key is valid - allow access for any user (as per requirements) - is_valid = true; + match validate_api_key(&state, api_key).await { + Ok(_) => { + println!("Valid API key"); + // Try to get user_id, but don't fail if it errors (might be cached RSS key) 
+ match state.db_pool.get_user_id_from_api_key(api_key).await { + Ok(user_id) => { + println!("API key user_id: {}", user_id); + is_valid = true; + is_web_key = state.db_pool.is_web_key(api_key).await?; + key_user_id = Some(user_id); + } + Err(e) => { + println!("Failed to get user_id for API key (might be RSS key): {}", e); + } + } + } + Err(e) => { + println!("API key validation failed: {}", e); + } } } - + if !is_valid { return Err(AppError::unauthorized("Invalid API key or RSS key")); } @@ -2194,52 +2343,212 @@ pub struct FetchPodcastFeedQuery { pub podcast_feed: String, } -// Fetch podcast feed endpoint - returns raw XML feed data +// Fetch podcast feed endpoint - returns parsed episode data using feed-rs pub async fn fetch_podcast_feed( State(state): State, headers: HeaderMap, Query(query): Query, -) -> Result { +) -> Result, AppError> { let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - // Define headers that mimic a standard web browser - matches Python implementation exactly - let client = reqwest::Client::new(); - let response = client - .get(&query.podcast_feed) - .header("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36") - .header("Accept", "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8") - .header("Accept-Language", "en-US,en;q=0.5") - .header("Connection", "keep-alive") - .header("Upgrade-Insecure-Requests", "1") - .header("Cache-Control", "max-age=0") - .timeout(std::time::Duration::from_secs(30)) - .send() - .await - .map_err(|e| AppError::external_error(&format!("Request error fetching podcast feed: {}", e)))?; + // Parse feed and extract episodes using feed-rs (same logic as add_episodes but without DB insertion) + let episodes = state.db_pool.parse_feed_episodes(&query.podcast_feed, user_id).await + .map_err(|e| 
AppError::external_error(&format!("Failed to parse podcast feed: {}", e)))?; + + Ok(Json(serde_json::json!({ "episodes": episodes }))) +} - if !response.status().is_success() { - return Err(AppError::external_error(&format!( - "HTTP error fetching podcast feed: {} {}", - response.status().as_u16(), - response.status().canonical_reason().unwrap_or("Unknown error") - ))); +// Handler for updating podcast basic info (URL, username, password) +pub async fn update_podcast_info( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); } - // Get the response body as bytes - let content = response - .bytes() - .await - .map_err(|e| AppError::external_error(&format!("Failed to read response body: {}", e)))?; + // Check authorization - users can only modify their own podcasts + let requesting_user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + if !check_user_access(&state, &api_key, requesting_user_id).await? 
{ + return Err(AppError::forbidden("You can only modify your own podcasts!")); + } + + if request.user_id != requesting_user_id { + return Err(AppError::forbidden("You can only modify your own podcasts!")); + } + + // Validate that at least one field is being updated + if request.feed_url.is_none() && request.username.is_none() && request.password.is_none() + && request.podcast_name.is_none() && request.description.is_none() && request.author.is_none() + && request.artwork_url.is_none() && request.website_url.is_none() && request.podcast_index_id.is_none() { + return Ok(Json(UpdatePodcastInfoResponse { + success: false, + message: "No fields provided to update".to_string(), + })); + } + + // Update the podcast info + let success = state.db_pool.update_podcast_info( + request.podcast_id, + request.user_id, + request.feed_url, + request.username, + request.password, + request.podcast_name, + request.description, + request.author, + request.artwork_url, + request.website_url, + request.podcast_index_id, + ).await?; - // Return the XML content with proper content-type - use axum::response::Response; - use axum::body::Body; + if success { + Ok(Json(UpdatePodcastInfoResponse { + success: true, + message: "Podcast updated successfully".to_string(), + })) + } else { + Ok(Json(UpdatePodcastInfoResponse { + success: false, + message: "Podcast not found or no changes made".to_string(), + })) + } +} + +// Request/Response structs for podcast merging +#[derive(Serialize, Deserialize, Debug)] +pub struct MergePodcastsRequest { + pub secondary_podcast_ids: Vec, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct MergePodcastsResponse { + pub success: bool, + pub message: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct UnmergePodcastResponse { + pub success: bool, + pub message: String, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct MergedPodcastsResponse { + pub merged_podcast_ids: Vec, +} + +// Merge podcasts endpoint +pub async fn 
merge_podcasts( + Path(primary_podcast_id): Path, + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; - let response = Response::builder() - .header("content-type", "application/xml") - .body(Body::from(content)) - .map_err(|e| AppError::external_error(&format!("Failed to create response: {}", e)))?; - - Ok(response) + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Get user ID from API key + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Validate request + if request.secondary_podcast_ids.is_empty() { + return Ok(Json(MergePodcastsResponse { + success: false, + message: "No secondary podcasts provided".to_string(), + })); + } + + // Check if primary podcast is in secondary list + if request.secondary_podcast_ids.contains(&primary_podcast_id) { + return Ok(Json(MergePodcastsResponse { + success: false, + message: "Cannot merge a podcast with itself".to_string(), + })); + } + + // Perform the merge + match state.db_pool.merge_podcasts(primary_podcast_id, &request.secondary_podcast_ids, user_id).await { + Ok(()) => Ok(Json(MergePodcastsResponse { + success: true, + message: format!("Successfully merged {} podcasts", request.secondary_podcast_ids.len()), + })), + Err(e) => Ok(Json(MergePodcastsResponse { + success: false, + message: format!("Failed to merge podcasts: {}", e), + })), + } +} + +// Unmerge podcast endpoint +pub async fn unmerge_podcast( + Path((primary_podcast_id, target_podcast_id)): Path<(i32, i32)>, + headers: HeaderMap, + State(state): State, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Get user ID from API key + let user_id = 
state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Perform the unmerge + match state.db_pool.unmerge_podcast(primary_podcast_id, target_podcast_id, user_id).await { + Ok(()) => Ok(Json(UnmergePodcastResponse { + success: true, + message: "Successfully unmerged podcast".to_string(), + })), + Err(e) => Ok(Json(UnmergePodcastResponse { + success: false, + message: format!("Failed to unmerge podcast: {}", e), + })), + } +} + +// Get merged podcasts endpoint +pub async fn get_merged_podcasts( + Path(podcast_id): Path, + headers: HeaderMap, + State(state): State, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Get user ID from API key + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Check if user owns the podcast + if !check_user_access(&state, &api_key, user_id).await? 
{ + return Err(AppError::forbidden("You can only access your own podcasts")); + } + + // Get merged podcast IDs + let merged_ids = state.db_pool.get_merged_podcast_ids(podcast_id).await?; + + Ok(Json(MergedPodcastsResponse { + merged_podcast_ids: merged_ids, + })) } \ No newline at end of file diff --git a/rust-api/src/handlers/refresh.rs b/rust-api/src/handlers/refresh.rs index 2814f462..42ae3181 100644 --- a/rust-api/src/handlers/refresh.rs +++ b/rust-api/src/handlers/refresh.rs @@ -12,8 +12,7 @@ use sqlx::Row; use crate::{ error::{AppError, AppResult}, - handlers::{extract_api_key, validate_api_key, check_user_access}, - services::task_manager::TaskManager, + handlers::check_user_access, AppState, }; @@ -159,16 +158,35 @@ pub async fn refresh_gpodder_subscriptions_admin( // Background refresh function that matches Python refresh_pods exactly - NO WebSocket async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { - println!("refresh begin"); + println!("Running refresh"); // Get ALL podcasts from ALL users - matches Python exactly // Handle the different database types properly + let total_podcasts = match &state.db_pool { + crate::database::DatabasePool::Postgres(pool) => { + let count_row = sqlx::query(r#"SELECT COUNT(*) as total FROM "Podcasts""#) + .fetch_one(pool) + .await?; + count_row.try_get::("total")? as usize + } + crate::database::DatabasePool::MySQL(pool) => { + let count_row = sqlx::query("SELECT COUNT(*) as total FROM Podcasts") + .fetch_one(pool) + .await?; + count_row.try_get::("total")? 
as usize + } + }; + + println!("Running refresh for {total_podcasts} podcasts"); + let mut current_podcast = 0; + match &state.db_pool { crate::database::DatabasePool::Postgres(pool) => { let rows = sqlx::query( r#"SELECT podcastid, feedurl, artworkurl, autodownload, username, password, - isyoutubechannel, userid, COALESCE(feedurl, '') as channel_id, feedcutoffdays - FROM "Podcasts""# + isyoutubechannel, userid, COALESCE(feedurl, '') as channel_id, feedcutoffdays, podcastname + FROM "Podcasts" + WHERE COALESCE(refreshpodcast, TRUE) = TRUE"# ) .fetch_all(pool) .await?; @@ -176,7 +194,7 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { for result in rows { let podcast_id: i32 = result.try_get("podcastid")?; let feed_url: String = result.try_get("feedurl")?; - let artwork_url: String = result.try_get("artworkurl")?; + let artwork_url: Option = result.try_get("artworkurl").ok(); let auto_download: bool = result.try_get("autodownload")?; let username: Option = result.try_get("username").ok(); let password: Option = result.try_get("password").ok(); @@ -184,7 +202,11 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { let user_id: i32 = result.try_get("userid")?; let feed_cutoff: Option = result.try_get("feedcutoffdays").ok(); - println!("Running for: {}", podcast_id); + current_podcast += 1; + + // Get podcast name for better logging + let podcast_name = result.try_get::("podcastname").unwrap_or_else(|_| format!("Podcast {}", podcast_id)); + println!("Running refresh for podcast {}/{}: {}", current_podcast, total_podcasts, podcast_name); if is_youtube { // Handle YouTube channel refresh @@ -216,7 +238,7 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { match state.db_pool.add_episodes_with_new_list( podcast_id, &feed_url, - &artwork_url, + artwork_url.as_deref().unwrap_or(""), username.as_deref(), password.as_deref() ).await { @@ -260,8 +282,9 @@ async fn 
refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { crate::database::DatabasePool::MySQL(pool) => { let rows = sqlx::query( "SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password, - IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays - FROM Podcasts" + IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays, PodcastName + FROM Podcasts + WHERE COALESCE(RefreshPodcast, 1) = 1" ) .fetch_all(pool) .await?; @@ -269,7 +292,7 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { for result in rows { let podcast_id: i32 = result.try_get("PodcastID")?; let feed_url: String = result.try_get("FeedURL")?; - let artwork_url: String = result.try_get("ArtworkURL")?; + let artwork_url: Option = result.try_get("ArtworkURL").ok(); let auto_download: bool = result.try_get("AutoDownload")?; let username: Option = result.try_get("Username").ok(); let password: Option = result.try_get("Password").ok(); @@ -277,7 +300,11 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { let user_id: i32 = result.try_get("UserID")?; let feed_cutoff: Option = result.try_get("FeedCutoffDays").ok(); - println!("Running for: {}", podcast_id); + current_podcast += 1; + + // Get podcast name for better logging + let podcast_name = result.try_get::("PodcastName").unwrap_or_else(|_| format!("Podcast {}", podcast_id)); + println!("Running refresh for podcast {}/{}: {}", current_podcast, total_podcasts, podcast_name); if is_youtube { // Handle YouTube channel refresh @@ -309,7 +336,7 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { match state.db_pool.add_episodes_with_new_list( podcast_id, &feed_url, - &artwork_url, + artwork_url.as_deref().unwrap_or(""), username.as_deref(), password.as_deref() ).await { @@ -352,7 +379,40 @@ async fn refresh_all_podcasts_background(state: &AppState) -> AppResult<()> { } } - println!("Background refresh 
completed"); + // Run auto-complete check for all users with auto-complete enabled after episode refresh + println!("Running auto-complete threshold check for all users..."); + match state.db_pool.get_users_with_auto_complete_enabled().await { + Ok(users_with_auto_complete) => { + let mut total_completed = 0; + for user_auto_complete in users_with_auto_complete { + match state.db_pool.auto_complete_user_episodes( + user_auto_complete.user_id, + user_auto_complete.auto_complete_seconds + ).await { + Ok(completed_count) => { + if completed_count > 0 { + println!("Auto-completed {} episodes for user {} (threshold: {}s)", + completed_count, user_auto_complete.user_id, user_auto_complete.auto_complete_seconds); + } + total_completed += completed_count; + } + Err(e) => { + println!("Failed to run auto-complete for user {}: {}", user_auto_complete.user_id, e); + } + } + } + if total_completed > 0 { + println!("Auto-complete threshold check completed: {} total episodes marked complete", total_completed); + } else { + println!("Auto-complete threshold check completed: no episodes needed completion"); + } + } + Err(e) => { + println!("Failed to get users with auto-complete enabled: {}", e); + } + } + + println!("Refresh completed"); Ok(()) } @@ -677,7 +737,7 @@ pub struct PodcastForRefresh { pub id: i32, pub name: String, pub feed_url: String, - pub artwork_url: String, + pub artwork_url: Option, pub is_youtube: bool, pub auto_download: bool, pub username: Option, @@ -723,7 +783,7 @@ async fn refresh_rss_feed( let new_episodes = state.db_pool.add_episodes_with_new_list( podcast.id, &podcast.feed_url, - &podcast.artwork_url, + podcast.artwork_url.as_deref().unwrap_or(""), podcast.username.as_deref(), podcast.password.as_deref() ).await?; @@ -868,6 +928,10 @@ pub async fn refresh_pods_admin_internal(state: &AppState) -> AppResult<()> { pub async fn refresh_gpodder_subscriptions_admin_internal(state: &AppState) -> AppResult<()> { tracing::info!("Starting internal GPodder sync 
(scheduler)"); + // Wait for GPodder service to be ready (5 second delay on startup) + tokio::time::sleep(tokio::time::Duration::from_secs(5)).await; + tracing::info!("GPodder service startup delay completed"); + // Get all users who have gPodder sync enabled (internal, external, both - NOT nextcloud) let gpodder_users = state.db_pool.get_all_users_with_gpodder_sync().await?; tracing::info!("Found {} users with GPodder sync enabled", gpodder_users.len()); diff --git a/rust-api/src/handlers/settings.rs b/rust-api/src/handlers/settings.rs index 223c97ba..20550940 100644 --- a/rust-api/src/handlers/settings.rs +++ b/rust-api/src/handlers/settings.rs @@ -7,10 +7,10 @@ use serde::{Deserialize, Serialize}; use crate::{ error::AppError, handlers::{extract_api_key, validate_api_key, check_user_access}, + models::{AvailableLanguage, LanguageUpdateRequest, UserLanguageResponse, AvailableLanguagesResponse}, AppState, }; -use std::collections::HashMap; -use sqlx::{Row, Column, ValueRef}; +use sqlx::{Row, ValueRef}; // Request struct for set_theme #[derive(Deserialize)] @@ -563,7 +563,7 @@ pub async fn save_email_settings( } state.db_pool.save_email_settings(&request.email_settings).await?; - Ok(Json(serde_json::json!({ "message": "Email settings saved." }))) + Ok(Json(serde_json::json!({ "detail": "Email settings saved." 
}))) } // Email settings response struct @@ -648,6 +648,133 @@ pub async fn send_test_email( Ok(Json(serde_json::json!({ "email_status": email_status }))) } +// HTML email template functions +async fn read_logo_as_base64() -> Result { + use std::path::Path; + use tokio::fs; + + let logo_path = Path::new("/var/www/html/static/assets/favicon.png"); + + if !logo_path.exists() { + return Err(AppError::internal("Logo file not found")); + } + + let logo_bytes = fs::read(logo_path).await + .map_err(|e| AppError::internal(&format!("Failed to read logo file: {}", e)))?; + + let base64_logo = base64::encode(&logo_bytes); + Ok(base64_logo) +} + +fn create_html_email_template(subject: &str, content: &str, logo_base64: &str) -> String { + format!(r#" + + + + + {} + + + + + +"#, subject, logo_base64, content) +} + // Internal email sending function using lettre async fn send_email_internal(request: &SendTestEmailRequest) -> Result { use lettre::{ @@ -661,15 +788,27 @@ async fn send_email_internal(request: &SendTestEmailRequest) -> Result📧 Test Email +

This is a test email from your PinePods server to verify your email configuration is working correctly.

+

Your message:

+

{}

+

If you received this email, your email settings are configured properly! 🎉

+ "#, request.message); + + let html_body = create_html_email_template("Test Email", &html_content, &logo_base64); + + // Create email message with HTML let email = Message::builder() .from(request.from_email.parse() .map_err(|_| AppError::bad_request("Invalid from email"))?) .to(request.to_email.parse() .map_err(|_| AppError::bad_request("Invalid to email"))?) - .subject("Test Email") - .header(ContentType::TEXT_PLAIN) - .body(request.message.clone()) + .subject("PinePods - Test Email") + .header(ContentType::TEXT_HTML) + .body(html_body) .map_err(|e| AppError::internal(&format!("Failed to build email: {}", e)))?; // Configure SMTP transport based on encryption @@ -734,8 +873,29 @@ async fn send_email_internal(request: &SendTestEmailRequest) -> Result Ok("Email sent successfully".to_string()), - Ok(Err(e)) => Err(AppError::internal(&format!("Failed to send email: {}", e))), - Err(_) => Err(AppError::internal("Email sending timed out after 30 seconds. Please check your SMTP server settings.".to_string())), + Ok(Err(e)) => { + let error_msg = format!("{}", e); + + // Provide more helpful error messages for common issues + if error_msg.contains("InvalidContentType") || error_msg.contains("corrupt message") { + let suggestion = if port == 587 { + "Port 587 typically requires StartTLS encryption, not SSL/TLS. Try changing encryption to 'StartTLS'." + } else if port == 465 { + "Port 465 typically requires SSL/TLS encryption." + } else { + "This may be a TLS/SSL configuration issue. Verify your encryption settings match your SMTP server requirements." + }; + Err(AppError::internal(&format!("SMTP connection failed: {}. {}. Original error: {}", + "TLS/SSL handshake error", suggestion, error_msg))) + } else if error_msg.contains("authentication") || error_msg.contains("auth") { + Err(AppError::internal(&format!("SMTP authentication failed: {}. 
Please verify your username and password.", error_msg))) + } else if error_msg.contains("connection") || error_msg.contains("timeout") { + Err(AppError::internal(&format!("SMTP connection failed: {}. Please verify server name and port.", error_msg))) + } else { + Err(AppError::internal(&format!("Failed to send email: {}", error_msg))) + } + }, + Err(_) => Err(AppError::internal("Email sending timed out after 30 seconds. Please check your SMTP server settings and network connectivity.".to_string())), } } @@ -768,7 +928,7 @@ pub async fn send_email( } // Send email using database settings -async fn send_email_with_settings( +pub async fn send_email_with_settings( settings: &EmailSettingsResponse, request: &SendEmailRequest, ) -> Result { @@ -779,15 +939,49 @@ async fn send_email_with_settings( }; use tokio::time::{timeout, Duration}; - // Create email message + // Read logo and create HTML content + let logo_base64 = read_logo_as_base64().await.unwrap_or_default(); + + // Check if this is a password reset email and format accordingly + let (html_content, final_subject) = if request.subject.contains("Password Reset") { + // Extract the reset code from the message + let reset_code = request.message.trim_start_matches("Your password reset code is "); + let content = format!(r#" +

🔐 Password Reset Request

+

You have requested a password reset for your PinePods account.

+

Please use the following code to reset your password:

+
{}
+

Important:

+
    +
  • This code will expire in 10 minutes
  • +
  • Only use this code if you requested a password reset
  • +
  • If you didn't request this, you can safely ignore this email
  • +
+

For security reasons, never share this code with anyone.

+ "#, reset_code); + (content, "PinePods - Password Reset Code".to_string()) + } else { + // For other emails, wrap the message content + let content = format!(r#" +

📧 {}

+
+ {} +
+ "#, request.subject, request.message.replace("\n", "
")); + (content, request.subject.clone()) + }; + + let html_body = create_html_email_template(&final_subject, &html_content, &logo_base64); + + // Create email message with HTML let email = Message::builder() .from(settings.from_email.parse() .map_err(|_| AppError::bad_request("Invalid from email in settings"))?) .to(request.to_email.parse() .map_err(|_| AppError::bad_request("Invalid to email"))?) - .subject(&request.subject) - .header(ContentType::TEXT_PLAIN) - .body(request.message.clone()) + .subject(&final_subject) + .header(ContentType::TEXT_HTML) + .body(html_body) .map_err(|e| AppError::internal(&format!("Failed to build email: {}", e)))?; // Configure SMTP transport based on encryption @@ -852,8 +1046,30 @@ async fn send_email_with_settings( let email_future = mailer.send(email); match timeout(Duration::from_secs(30), email_future).await { Ok(Ok(_)) => Ok("Email sent successfully".to_string()), - Ok(Err(e)) => Err(AppError::internal(&format!("Failed to send email: {}", e))), - Err(_) => Err(AppError::internal("Email sending timed out after 30 seconds. Please check your SMTP server settings.".to_string())), + Ok(Err(e)) => { + let error_msg = format!("{}", e); + let port = settings.server_port as u16; + + // Provide more helpful error messages for common issues + if error_msg.contains("InvalidContentType") || error_msg.contains("corrupt message") { + let suggestion = if port == 587 { + "Port 587 typically requires StartTLS encryption, not SSL/TLS. Try changing encryption to 'StartTLS'." + } else if port == 465 { + "Port 465 typically requires SSL/TLS encryption." + } else { + "This may be a TLS/SSL configuration issue. Verify your encryption settings match your SMTP server requirements." + }; + Err(AppError::internal(&format!("SMTP connection failed: {}. {}. 
Original error: {}", + "TLS/SSL handshake error", suggestion, error_msg))) + } else if error_msg.contains("authentication") || error_msg.contains("auth") { + Err(AppError::internal(&format!("SMTP authentication failed: {}. Please verify your username and password.", error_msg))) + } else if error_msg.contains("connection") || error_msg.contains("timeout") { + Err(AppError::internal(&format!("SMTP connection failed: {}. Please verify server name and port.", error_msg))) + } else { + Err(AppError::internal(&format!("Failed to send email: {}", error_msg))) + } + }, + Err(_) => Err(AppError::internal("Email sending timed out after 30 seconds. Please check your SMTP server settings and network connectivity.".to_string())), } } @@ -1101,9 +1317,12 @@ async fn backup_server_streaming( .arg("--port").arg(&port) .arg("--user").arg(&username) .arg(format!("--password={}", database_pass)) + .arg("--skip-ssl") + .arg("--default-auth=mysql_native_password") .arg("--single-transaction") - .arg("--no-create-info") - .arg("--disable-keys") + .arg("--routines") + .arg("--triggers") + .arg("--complete-insert") .arg(&database); cmd @@ -1117,18 +1336,36 @@ async fn backup_server_streaming( let stdout = child.stdout.take() .ok_or("Failed to get stdout from backup process")?; + + let stderr = child.stderr.take() + .ok_or("Failed to get stderr from backup process")?; let stream = ReaderStream::new(stdout); let body = Body::from_stream(stream); // Spawn a task to wait for the process and handle errors tokio::spawn(async move { + // Read stderr to capture error messages + let mut stderr_reader = tokio::io::BufReader::new(stderr); + let mut stderr_output = String::new(); + use tokio::io::AsyncBufReadExt; + + // Read stderr line by line + let mut lines = stderr_reader.lines(); + while let Ok(Some(line)) = lines.next_line().await { + stderr_output.push_str(&line); + stderr_output.push('\n'); + } + match child.wait().await { Ok(status) if status.success() => { println!("Backup process 
completed successfully"); } Ok(status) => { println!("Backup process failed with status: {}", status); + if !stderr_output.is_empty() { + println!("Mysqldump stderr output: {}", stderr_output); + } } Err(e) => { println!("Failed to wait for backup process: {}", e); @@ -1659,6 +1896,7 @@ pub async fn delete_mfa( // Request struct for initiate_nextcloud_login #[derive(Deserialize)] pub struct InitiateNextcloudLoginRequest { + pub user_id: i32, pub nextcloud_url: String, } @@ -1671,20 +1909,25 @@ pub async fn initiate_nextcloud_login( let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; - let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let login_data = state.db_pool.initiate_nextcloud_login(user_id, &request.nextcloud_url).await?; + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Allow the action only if the API key belongs to the user + if key_id != request.user_id { + return Err(AppError::forbidden("You are not authorized to initiate this action.")); + } + + let login_data = state.db_pool.initiate_nextcloud_login(request.user_id, &request.nextcloud_url).await?; - Ok(Json(serde_json::json!({ - "login": login_data.login_url, - "token": login_data.token - }))) + Ok(Json(login_data.raw_response)) } // Request struct for add_nextcloud_server -#[derive(Deserialize)] +#[derive(Deserialize, Clone)] pub struct AddNextcloudServerRequest { - pub nextcloud_url: String, + pub user_id: i32, pub token: String, + pub poll_endpoint: String, + pub nextcloud_url: String, } // Add Nextcloud server - matches Python add_nextcloud_server function exactly @@ -1696,14 +1939,178 @@ pub async fn add_nextcloud_server( let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; - let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let success = state.db_pool.add_nextcloud_server(user_id, &request.nextcloud_url, &request.token).await?; + let key_id = 
state.db_pool.get_user_id_from_api_key(&api_key).await?; + + // Allow the action only if the API key belongs to the user + if key_id != request.user_id { + return Err(AppError::forbidden("You are not authorized to access these user details")); + } + + // Reset gPodder settings to default like Python version + state.db_pool.remove_podcast_sync(request.user_id).await?; + + // Create a task for the Nextcloud authentication polling + let task_id = state.task_manager.create_task("nextcloud_auth".to_string(), request.user_id).await?; - if success { - Ok(Json(serde_json::json!({ "status": "success" }))) - } else { - Err(AppError::internal("Failed to add Nextcloud server")) + // Start background polling task using TaskManager + let state_clone = state.clone(); + let request_clone = request.clone(); + let task_id_clone = task_id.clone(); + tokio::spawn(async move { + poll_for_auth_completion_background(state_clone, request_clone, task_id_clone).await; + }); + + // Return 200 status code before starting to poll (like Python version) + Ok(Json(serde_json::json!({ "status": "polling", "task_id": task_id }))) +} + +// Background task for polling Nextcloud auth completion +async fn poll_for_auth_completion_background(state: AppState, request: AddNextcloudServerRequest, task_id: String) { + // Update task to indicate polling has started + if let Err(e) = state.task_manager.update_task_progress(&task_id, 10.0, Some("Starting Nextcloud authentication polling...".to_string())).await { + eprintln!("Failed to update task progress: {}", e); + } + + match poll_for_auth_completion(&request.poll_endpoint, &request.token, &state.task_manager, &task_id).await { + Ok(credentials) => { + println!("Nextcloud authentication successful: {:?}", credentials); + + // Update task progress + if let Err(e) = state.task_manager.update_task_progress(&task_id, 90.0, Some("Authentication successful, saving credentials...".to_string())).await { + eprintln!("Failed to update task progress: {}", e); + } + + 
// Extract credentials from the response + if let (Some(app_password), Some(login_name)) = ( + credentials.get("appPassword").and_then(|v| v.as_str()), + credentials.get("loginName").and_then(|v| v.as_str()) + ) { + // Save the real credentials using the database method + match state.db_pool.save_nextcloud_credentials(request.user_id, &request.nextcloud_url, app_password, login_name).await { + Ok(_) => { + println!("Successfully added Nextcloud settings for user {}", request.user_id); + if let Err(e) = state.task_manager.complete_task(&task_id, + Some(serde_json::json!({"status": "success", "message": "Nextcloud authentication completed"})), + Some("Nextcloud authentication completed successfully".to_string())).await { + eprintln!("Failed to complete task: {}", e); + } + } + Err(e) => { + eprintln!("Failed to add Nextcloud settings: {}", e); + if let Err(e) = state.task_manager.fail_task(&task_id, format!("Failed to save Nextcloud settings: {}", e)).await { + eprintln!("Failed to fail task: {}", e); + } + } + } + } else { + eprintln!("Missing appPassword or loginName in credentials"); + if let Err(e) = state.task_manager.fail_task(&task_id, "Missing credentials in Nextcloud response".to_string()).await { + eprintln!("Failed to fail task: {}", e); + } + } + } + Err(e) => { + eprintln!("Nextcloud authentication failed: {}", e); + if let Err(e) = state.task_manager.fail_task(&task_id, format!("Authentication failed: {}", e)).await { + eprintln!("Failed to fail task: {}", e); + } + } + } +} + +// Poll for auth completion - matches Python poll_for_auth_completion function +async fn poll_for_auth_completion( + endpoint: &str, + token: &str, + task_manager: &crate::services::task_manager::TaskManager, + task_id: &str +) -> Result> { + let client = reqwest::Client::new(); + let payload = serde_json::json!({ "token": token }); + let timeout = std::time::Duration::from_secs(20 * 60); // 20 minutes timeout + let start_time = std::time::Instant::now(); + + let mut poll_count = 
0; + while start_time.elapsed() < timeout { + poll_count += 1; + + // Update progress based on time elapsed (up to 80% during polling) + let elapsed_secs = start_time.elapsed().as_secs(); + let progress = 10.0 + ((elapsed_secs as f64 / (20.0 * 60.0)) * 70.0).min(70.0); + let message = format!("Waiting for user to complete authentication... (attempt {})", poll_count); + + if let Err(e) = task_manager.update_task_progress(task_id, progress, Some(message)).await { + eprintln!("Failed to update task progress during polling: {}", e); + } + + match client + .post(endpoint) + .json(&payload) + .header("Content-Type", "application/json") + .send() + .await + { + Ok(response) => { + match response.status().as_u16() { + 200 => { + let credentials = response.json::().await?; + println!("Authentication successful: {:?}", credentials); + return Ok(credentials); + } + 404 => { + // User hasn't completed auth yet, continue polling + tokio::time::sleep(std::time::Duration::from_secs(5)).await; + } + status => { + println!("Polling failed with status code {}", status); + return Err(format!("Polling for Nextcloud authentication failed with status {}", status).into()); + } + } + } + Err(e) => { + println!("Connection error, retrying: {}", e); + tokio::time::sleep(std::time::Duration::from_secs(5)).await; + } + } + } + + Err("Polling timeout reached".into()) +} + +// Helper function to save Nextcloud credentials directly to database +async fn save_nextcloud_credentials( + db_pool: &crate::database::DatabasePool, + user_id: i32, + nextcloud_url: &str, + app_password: &str, + login_name: &str +) -> crate::error::AppResult<()> { + // Encrypt the app password + let encrypted_password = db_pool.encrypt_password(app_password).await?; + + // Store Nextcloud credentials + match db_pool { + crate::database::DatabasePool::Postgres(pool) => { + sqlx::query(r#"UPDATE "Users" SET gpodderurl = $1, gpodderloginname = $2, gpoddertoken = $3, pod_sync_type = 'nextcloud' WHERE userid = $4"#) + 
.bind(nextcloud_url) + .bind(login_name) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } + crate::database::DatabasePool::MySQL(pool) => { + sqlx::query("UPDATE Users SET GpodderUrl = ?, GpodderLoginName = ?, GpodderToken = ?, Pod_Sync_Type = 'nextcloud' WHERE UserID = ?") + .bind(nextcloud_url) + .bind(login_name) + .bind(&encrypted_password) + .bind(user_id) + .execute(pool) + .await?; + } } + + Ok(()) } // Request struct for verify_gpodder_auth @@ -1723,8 +2130,29 @@ pub async fn verify_gpodder_auth( let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; - let verified = state.db_pool.verify_gpodder_auth(&request.gpodder_url, &request.gpodder_username, &request.gpodder_password).await?; - Ok(Json(serde_json::json!({ "verified": verified }))) + // Direct HTTP call to match Python implementation exactly + let client = reqwest::Client::new(); + let auth_url = format!("{}/api/2/auth/{}/login.json", + request.gpodder_url.trim_end_matches('/'), + request.gpodder_username); + + match client + .post(&auth_url) + .basic_auth(&request.gpodder_username, Some(&request.gpodder_password)) + .send() + .await + { + Ok(response) => { + if response.status().is_success() { + Ok(Json(serde_json::json!({"status": "success", "message": "Logged in!"}))) + } else { + Err(AppError::unauthorized("Authentication failed")) + } + } + Err(_) => { + Err(AppError::internal("Internal Server Error")) + } + } } // Request struct for add_gpodder_server @@ -1773,7 +2201,7 @@ pub async fn get_gpodder_settings( let settings = state.db_pool.get_gpodder_settings(user_id).await?; match settings { - Some(settings) => Ok(Json(settings)), + Some(settings) => Ok(Json(serde_json::json!({ "data": settings }))), None => Err(AppError::not_found("gPodder settings not found")), } } @@ -1783,7 +2211,7 @@ pub async fn check_gpodder_settings( State(state): State, Path(user_id): Path, headers: HeaderMap, -) -> Result, AppError> { +) -> Result, AppError> { 
let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; @@ -1796,25 +2224,42 @@ pub async fn check_gpodder_settings( } let has_settings = state.db_pool.check_gpodder_settings(user_id).await?; - Ok(Json(has_settings)) + Ok(Json(serde_json::json!({ "data": has_settings }))) } // Remove podcast sync - matches Python remove_podcast_sync function exactly +#[derive(Debug, serde::Deserialize)] +pub struct RemoveSyncRequest { + pub user_id: i32, +} + pub async fn remove_podcast_sync( State(state): State, headers: HeaderMap, + Json(request): Json, ) -> Result, AppError> { let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; - let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let success = state.db_pool.remove_podcast_sync(user_id).await?; + // Check if the user has permission to modify this user's data + let user_id_from_api_key = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if request.user_id != user_id_from_api_key && !is_web_key { + return Err(AppError::forbidden("You are not authorized to modify these user settings")); + } + + // Remove the sync settings + let success = state.db_pool.remove_gpodder_settings(request.user_id).await?; if success { - Ok(Json(serde_json::json!({ "status": "success" }))) + Ok(Json(serde_json::json!({ + "success": true, + "message": "Podcast sync settings removed successfully" + }))) } else { - Err(AppError::internal("Failed to remove podcast sync")) + Err(AppError::internal("Failed to remove podcast sync settings")) } } @@ -1827,6 +2272,8 @@ pub struct CustomPodcastRequest { pub user_id: i32, pub username: Option, pub password: Option, + pub youtube_channel: Option, + pub feed_cutoff: Option, } // Request struct for import_opml @@ -1857,6 +2304,9 @@ pub struct NotificationSettingsRequest { pub ntfy_access_token: Option, pub gotify_url: Option, pub gotify_token: Option, + pub 
http_url: Option, + pub http_token: Option, + pub http_method: Option, } // Request struct for test_notification @@ -1917,6 +2367,56 @@ pub async fn add_custom_podcast( return Err(AppError::forbidden("You can only add podcasts for yourself!")); } + // Check if this is a YouTube channel request + if request.youtube_channel.unwrap_or(false) { + // Extract channel ID from YouTube URL + let channel_id = extract_youtube_channel_id(&request.feed_url)?; + + // Check if channel already exists + let existing_id = state.db_pool.check_existing_channel_subscription( + &channel_id, + request.user_id, + ).await?; + + if let Some(podcast_id) = existing_id { + // Channel already subscribed, return existing podcast details + let podcast_details = state.db_pool.get_podcast_details(request.user_id, podcast_id).await?; + return Ok(Json(serde_json::json!({ "data": podcast_details }))); + } + + // Get channel info using yt-dlp (bypasses Google API limits) + let channel_info = crate::handlers::youtube::get_youtube_channel_info(&channel_id).await?; + + let feed_cutoff = request.feed_cutoff.unwrap_or(30); + + // Add YouTube channel to database + let podcast_id = state.db_pool.add_youtube_channel( + &channel_info, + request.user_id, + feed_cutoff, + ).await?; + + // Spawn background task to process YouTube videos + let state_clone = state.clone(); + let channel_id_clone = channel_id.clone(); + tokio::spawn(async move { + if let Err(e) = crate::handlers::youtube::process_youtube_channel( + podcast_id, + &channel_id_clone, + feed_cutoff, + &state_clone + ).await { + println!("Error processing YouTube channel {}: {}", channel_id_clone, e); + } + }); + + // Get complete podcast details for response + let podcast_details = state.db_pool.get_podcast_details(request.user_id, podcast_id).await?; + + return Ok(Json(serde_json::json!({ "data": podcast_details }))); + } + + // Regular podcast feed handling // Get podcast values from feed URL let podcast_values = state.db_pool.get_podcast_values( 
&request.feed_url, @@ -1929,7 +2429,9 @@ pub async fn add_custom_podcast( let (podcast_id, _) = state.db_pool.add_podcast_from_values( &podcast_values, request.user_id, - 30 + 30, + request.username.as_deref(), + request.password.as_deref() ).await?; // Get complete podcast details for response @@ -1938,6 +2440,44 @@ pub async fn add_custom_podcast( Ok(Json(serde_json::json!({ "data": podcast_details }))) } +// Helper function to extract YouTube channel ID from various URL formats +fn extract_youtube_channel_id(url: &str) -> Result { + // Support various YouTube URL formats: + // - https://www.youtube.com/channel/UC... + // - https://youtube.com/channel/UC... + // - https://www.youtube.com/@channelname + // - youtube.com/@channelname + // - Just the channel ID itself: UC... + + let url_lower = url.to_lowercase(); + + // If it's already a channel ID (starts with UC) + if url.starts_with("UC") && !url.contains('/') && !url.contains('.') { + return Ok(url.to_string()); + } + + // Extract from /channel/ URLs + if url_lower.contains("/channel/") { + if let Some(channel_part) = url.split("/channel/").nth(1) { + let channel_id = channel_part.split(&['/', '?', '&'][..]).next().unwrap_or(""); + if !channel_id.is_empty() { + return Ok(channel_id.to_string()); + } + } + } + + // For @handle URLs, we need to use yt-dlp to resolve the channel ID + // This will be handled by get_youtube_channel_info, so we return the URL as-is + if url_lower.contains("/@") || url.starts_with('@') { + return Ok(url.to_string()); + } + + Err(AppError::bad_request(&format!( + "Invalid YouTube channel URL. Expected format: https://www.youtube.com/channel/UC... or https://www.youtube.com/@channelname or just the channel ID. 
Got: {}", + url + ))) +} + // Import OPML - matches Python import_opml function exactly with background processing pub async fn import_opml( State(state): State, @@ -1980,7 +2520,9 @@ pub async fn import_opml( let _ = state_clone.db_pool.add_podcast_from_values( &podcast_values, user_id, - 30 // feed_cutoff + 30, // feed_cutoff + None, // username + None // password ).await; } Err(e) => { @@ -2077,7 +2619,10 @@ pub async fn update_notification_settings( request.ntfy_password.as_deref(), request.ntfy_access_token.as_deref(), request.gotify_url.as_deref(), - request.gotify_token.as_deref() + request.gotify_token.as_deref(), + request.http_url.as_deref(), + request.http_token.as_deref(), + request.http_method.as_deref() ).await?; Ok(Json(serde_json::json!({ "detail": "Notification settings updated successfully" }))) } @@ -2101,8 +2646,13 @@ pub async fn test_notification( // Get notification settings and send test notification let settings = state.db_pool.get_notification_settings(request.user_id).await?; - let settings_json = serde_json::to_value(&settings)?; - let success = state.notification_manager.send_test_notification(request.user_id, &request.platform, &settings_json).await?; + + // Find settings for the specific platform + let platform_settings = settings.iter() + .find(|s| s.get("platform").and_then(|p| p.as_str()) == Some(&request.platform)) + .ok_or_else(|| AppError::bad_request(&format!("No settings found for platform: {}", request.platform)))?; + + let success = state.notification_manager.send_test_notification(request.user_id, &request.platform, platform_settings).await?; if success { Ok(Json(serde_json::json!({ "detail": "Test notification sent successfully" }))) @@ -2145,11 +2695,65 @@ pub async fn add_oidc_provider( request.username_claim.as_deref().unwrap_or("username"), request.roles_claim.as_deref().unwrap_or(""), request.user_role.as_deref().unwrap_or(""), - request.admin_role.as_deref().unwrap_or("") + 
request.admin_role.as_deref().unwrap_or(""), + false // initialized_from_env = false (added via UI) ).await?; Ok(Json(serde_json::json!({ "provider_id": provider_id }))) } +// Update OIDC provider - updates an existing provider +pub async fn update_oidc_provider( + State(state): State, + headers: HeaderMap, + Path(provider_id): Path, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin - OIDC provider management requires admin access + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required to update OIDC providers")); + } + + // Only update client_secret if it's not empty + let client_secret_to_update = if request.client_secret.is_empty() { + None + } else { + Some(request.client_secret.as_str()) + }; + + let success = state.db_pool.update_oidc_provider( + provider_id, + &request.provider_name, + &request.client_id, + client_secret_to_update, + &request.authorization_url, + &request.token_url, + &request.user_info_url, + &request.button_text, + &request.scope, + &request.button_color, + &request.button_text_color, + request.icon_svg.as_deref().unwrap_or(""), + request.name_claim.as_deref().unwrap_or("name"), + request.email_claim.as_deref().unwrap_or("email"), + request.username_claim.as_deref().unwrap_or("username"), + request.roles_claim.as_deref().unwrap_or(""), + request.user_role.as_deref().unwrap_or(""), + request.admin_role.as_deref().unwrap_or("") + ).await?; + + if success { + Ok(Json(serde_json::json!({ "message": "OIDC provider updated successfully" }))) + } else { + Err(AppError::not_found("OIDC provider not found")) + } +} + // List OIDC providers - matches Python list_oidc_providers function exactly pub async fn list_oidc_providers( State(state): State, @@ -2162,6 +2766,38 @@ pub async 
fn list_oidc_providers( Ok(Json(serde_json::json!({ "providers": providers }))) } +// Remove OIDC provider - matches Python remove_oidc_provider function exactly +pub async fn remove_oidc_provider( + State(state): State, + headers: HeaderMap, + Json(provider_id): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin - OIDC provider management requires admin access + let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required to remove OIDC providers")); + } + + // Check if provider was initialized from environment variables + let is_env_initialized = state.db_pool.is_oidc_provider_env_initialized(provider_id).await?; + if is_env_initialized { + return Err(AppError::forbidden("Cannot remove OIDC provider that was initialized from environment variables. 
Providers created from docker-compose environment variables are protected from removal to prevent login issues.")); + } + + let success = state.db_pool.remove_oidc_provider(provider_id).await?; + + if success { + Ok(Json(serde_json::json!({ "message": "OIDC provider removed successfully" }))) + } else { + Err(AppError::not_found("OIDC provider not found")) + } +} + // Get startpage - matches Python startpage GET function exactly pub async fn get_startpage( State(state): State, @@ -2439,7 +3075,11 @@ pub async fn toggle_podcast_notifications( let success = state.db_pool.toggle_podcast_notifications(request.user_id, request.podcast_id, request.enabled).await?; - Ok(Json(serde_json::json!(success))) + if success { + Ok(Json(serde_json::json!({ "detail": "Notification settings updated successfully" }))) + } else { + Ok(Json(serde_json::json!({ "detail": "Failed to update notification settings" }))) + } } // Request struct for adjust_skip_times - matches Python SkipTimesRequest model @@ -2601,3 +3241,853 @@ pub async fn verify_mfa( } } +// Scheduled backup management +#[derive(Deserialize)] +pub struct ScheduleBackupRequest { + pub user_id: i32, + pub cron_schedule: String, // e.g., "0 2 * * *" for daily at 2 AM + pub enabled: bool, +} + +#[derive(Deserialize)] +pub struct GetScheduledBackupRequest { + pub user_id: i32, +} + +#[derive(Deserialize)] +pub struct ListBackupFilesRequest { + pub user_id: i32, +} + +#[derive(Deserialize)] +pub struct RestoreBackupFileRequest { + pub user_id: i32, + pub backup_filename: String, +} + +// Schedule automatic backup - admin only +pub async fn schedule_backup( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin + let requesting_user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(requesting_user_id).await?; + + 
if !is_admin { + return Err(AppError::forbidden("Admin access required")); + } + + // Validate cron expression using tokio-cron-scheduler + use tokio_cron_scheduler::Job; + if let Err(_) = Job::new(&request.cron_schedule, |_uuid, _lock| {}) { + return Err(AppError::bad_request("Invalid cron schedule format")); + } + + // Store the schedule in database + state.db_pool.set_scheduled_backup(request.user_id, &request.cron_schedule, request.enabled).await?; + + Ok(Json(serde_json::json!({ + "detail": "Backup schedule updated successfully", + "schedule": request.cron_schedule, + "enabled": request.enabled + }))) +} + +// Get scheduled backup settings - admin only +pub async fn get_scheduled_backup( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin + let requesting_user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(requesting_user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required")); + } + + let schedule_info = state.db_pool.get_scheduled_backup(request.user_id).await?; + + Ok(Json(serde_json::json!(schedule_info))) +} + +// List backup files in mounted backup directory - admin only +pub async fn list_backup_files( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin + let requesting_user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(requesting_user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required")); + } + + use std::fs; + + let backup_dir = "/opt/pinepods/backups"; + let backup_files = match fs::read_dir(backup_dir) { + Ok(entries) => { + let mut files = 
Vec::new(); + for entry in entries { + if let Ok(entry) = entry { + let path = entry.path(); + if path.is_file() && path.extension().map_or(false, |ext| ext == "sql") { + if let Some(filename) = path.file_name().and_then(|n| n.to_str()) { + let metadata = entry.metadata().ok(); + let size = metadata.as_ref().map(|m| m.len()).unwrap_or(0); + let modified = metadata.as_ref() + .and_then(|m| m.modified().ok()) + .and_then(|t| t.duration_since(std::time::UNIX_EPOCH).ok()) + .map(|d| d.as_secs()) + .unwrap_or(0); + + files.push(serde_json::json!({ + "filename": filename, + "size": size, + "modified": modified + })); + } + } + } + } + files.sort_by(|a, b| { + let a_modified = a["modified"].as_u64().unwrap_or(0); + let b_modified = b["modified"].as_u64().unwrap_or(0); + b_modified.cmp(&a_modified) // Sort by modified date desc (newest first) + }); + files + } + Err(_) => { + return Err(AppError::internal("Failed to read backup directory")); + } + }; + + Ok(Json(serde_json::json!({ + "backup_files": backup_files + }))) +} + +// Restore from backup file in mounted directory - admin only +pub async fn restore_from_backup_file( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin + let requesting_user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(requesting_user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required")); + } + + // Validate filename to prevent path traversal + let backup_filename = request.backup_filename.clone(); + if backup_filename.contains("..") || backup_filename.contains("/") || !backup_filename.ends_with(".sql") { + return Err(AppError::bad_request("Invalid backup filename")); + } + + let backup_path = format!("/opt/pinepods/backups/{}", backup_filename); + + // Check if file exists + if 
!std::path::Path::new(&backup_path).exists() { + return Err(AppError::not_found("Backup file not found")); + } + + // Clone for the async closure + let backup_filename_for_closure = backup_filename.clone(); + + // Spawn restoration task + let task_id = state.task_spawner.spawn_progress_task( + "restore_from_backup_file".to_string(), + 0, // System user + move |reporter| { + let backup_path = backup_path.clone(); + let backup_filename = backup_filename_for_closure; + async move { + reporter.update_progress(10.0, Some("Starting restoration from backup file...".to_string())).await?; + + // Get database password from environment + let db_password = std::env::var("DB_PASSWORD") + .map_err(|_| AppError::internal("Database password not found in environment"))?; + + reporter.update_progress(50.0, Some("Restoring database...".to_string())).await?; + + // Execute restoration based on database type + use tokio::process::Command; + let db_type = std::env::var("DB_TYPE").unwrap_or_else(|_| "postgresql".to_string()); + let db_host = std::env::var("DB_HOST").unwrap_or_else(|_| "localhost".to_string()); + let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "pinepods_database".to_string()); + + let output = if db_type.to_lowercase().contains("mysql") || db_type.to_lowercase().contains("mariadb") { + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "3306".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "root".to_string()); + + let mut cmd = Command::new("mysql"); + cmd.arg("-h").arg(&db_host) + .arg("-P").arg(&db_port) + .arg("-u").arg(&db_user) + .arg(&format!("-p{}", db_password)) + .arg("--ssl-verify-server-cert=0") + .arg(&db_name); + + // For MySQL, we need to pipe the file content to stdin + cmd.stdin(std::process::Stdio::piped()); + let mut child = cmd.spawn() + .map_err(|e| AppError::internal(&format!("Failed to execute mysql: {}", e)))?; + + // Read the backup file and send to mysql stdin + let backup_content = 
tokio::fs::read_to_string(&backup_path).await + .map_err(|e| AppError::internal(&format!("Failed to read backup file: {}", e)))?; + + if let Some(stdin) = child.stdin.as_mut() { + use tokio::io::AsyncWriteExt; + stdin.write_all(backup_content.as_bytes()).await + .map_err(|e| AppError::internal(&format!("Failed to write to mysql stdin: {}", e)))?; + } + + child.wait_with_output().await + .map_err(|e| AppError::internal(&format!("Failed to wait for mysql: {}", e)))? + } else { + // PostgreSQL + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "5432".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "postgres".to_string()); + + let mut cmd = Command::new("psql"); + cmd.arg("-h").arg(&db_host) + .arg("-p").arg(&db_port) + .arg("-U").arg(&db_user) + .arg("-d").arg(&db_name) + .arg("-f").arg(&backup_path) + .env("PGPASSWORD", &db_password); + + cmd.output().await + .map_err(|e| AppError::internal(&format!("Failed to execute psql: {}", e)))? + }; + + if !output.status.success() { + let error_msg = String::from_utf8_lossy(&output.stderr); + return Err(AppError::internal(&format!("Restore failed: {}", error_msg))); + } + + reporter.update_progress(100.0, Some("Restoration completed successfully".to_string())).await?; + + Ok(serde_json::json!({ + "status": "Restoration completed successfully", + "backup_file": backup_filename + })) + } + } + ).await?; + + Ok(Json(serde_json::json!({ + "detail": "Restoration started", + "task_id": task_id + }))) +} + +// Request struct for manual backup to directory +#[derive(Deserialize)] +pub struct ManualBackupRequest { + pub user_id: i32, +} + +// Manual backup to directory - admin only +pub async fn manual_backup_to_directory( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check if user is admin + let requesting_user_id = 
state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_admin = state.db_pool.user_admin_check(requesting_user_id).await?; + + if !is_admin { + return Err(AppError::forbidden("Admin access required")); + } + + // Generate filename with timestamp + let timestamp = chrono::Utc::now().format("%Y%m%d_%H%M%S"); + let backup_filename = format!("manual_backup_{}.sql", timestamp); + let backup_path = format!("/opt/pinepods/backups/{}", backup_filename); + + // Ensure backup directory exists + if let Err(e) = std::fs::create_dir_all("/opt/pinepods/backups") { + return Err(AppError::internal(&format!("Failed to create backup directory: {}", e))); + } + + // Set ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set directory ownership (ignore errors for NFS mounts) + let _ = std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), "/opt/pinepods/backups".to_string()]) + .output(); + + // Clone for the async closure + let backup_filename_for_closure = backup_filename.clone(); + + // Spawn backup task + let task_id = state.task_spawner.spawn_progress_task( + "manual_backup_to_directory".to_string(), + 0, // System user + move |reporter| { + let backup_path = backup_path.clone(); + let backup_filename = backup_filename_for_closure; + async move { + reporter.update_progress(10.0, Some("Starting manual backup...".to_string())).await?; + + // Get database credentials from environment + let db_type = std::env::var("DB_TYPE").unwrap_or_else(|_| "postgresql".to_string()); + let db_host = std::env::var("DB_HOST").unwrap_or_else(|_| "localhost".to_string()); + let db_name = std::env::var("DB_NAME").unwrap_or_else(|_| "pinepods_database".to_string()); + let db_password = std::env::var("DB_PASSWORD") + .map_err(|_| AppError::internal("Database 
password not found in environment"))?; + + reporter.update_progress(30.0, Some("Creating database backup...".to_string())).await?; + + // Use appropriate backup command based on database type + let output = if db_type.to_lowercase().contains("mysql") || db_type.to_lowercase().contains("mariadb") { + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "3306".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "root".to_string()); + + tokio::process::Command::new("mysqldump") + .args(&[ + "-h", &db_host, + "-P", &db_port, + "-u", &db_user, + &format!("-p{}", db_password), + "--single-transaction", + "--routines", + "--triggers", + "--ssl-verify-server-cert=0", + "--result-file", &backup_path, + &db_name + ]) + .output() + .await + .map_err(|e| AppError::internal(&format!("Failed to execute mysqldump: {}", e)))? + } else { + // PostgreSQL + let db_port = std::env::var("DB_PORT").unwrap_or_else(|_| "5432".to_string()); + let db_user = std::env::var("DB_USER").unwrap_or_else(|_| "postgres".to_string()); + + tokio::process::Command::new("pg_dump") + .env("PGPASSWORD", db_password) + .args(&[ + "-h", &db_host, + "-p", &db_port, + "-U", &db_user, + "-d", &db_name, + "--clean", + "--if-exists", + "--no-owner", + "--no-privileges", + "-f", &backup_path + ]) + .output() + .await + .map_err(|e| AppError::internal(&format!("Failed to execute pg_dump: {}", e)))? 
+ }; + + if !output.status.success() { + let error_msg = String::from_utf8_lossy(&output.stderr); + return Err(AppError::internal(&format!("Backup failed: {}", error_msg))); + } + + reporter.update_progress(90.0, Some("Finalizing backup...".to_string())).await?; + + // Set file ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set backup file ownership (ignore errors for NFS mounts) + let _ = std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), backup_path.clone()]) + .output(); + + // Check if backup file was created and get its size + let backup_info = match std::fs::metadata(&backup_path) { + Ok(metadata) => serde_json::json!({ + "filename": backup_filename, + "size": metadata.len(), + "path": backup_path + }), + Err(_) => { + return Err(AppError::internal("Backup file was not created")); + } + }; + + reporter.update_progress(100.0, Some("Manual backup completed successfully".to_string())).await?; + + Ok(serde_json::json!({ + "status": "Manual backup completed successfully", + "backup_info": backup_info + })) + } + } + ).await?; + + Ok(Json(serde_json::json!({ + "detail": "Manual backup started", + "task_id": task_id, + "filename": backup_filename + }))) +} + +// Request for getting podcasts with podcast_index_id = 0 +#[derive(Deserialize)] +pub struct GetUnmatchedPodcastsRequest { + pub user_id: i32, +} + +// Get podcasts that have podcast_index_id = 0 (imported via OPML without podcast index match) +pub async fn get_unmatched_podcasts( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); 
+ } + + // Check if it's web key or user's own key + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + if key_id == request.user_id || is_web_key { + let podcasts = state.db_pool.get_unmatched_podcasts(request.user_id).await?; + Ok(Json(serde_json::json!({"podcasts": podcasts}))) + } else { + Err(AppError::forbidden("You can only access your own podcasts")) + } +} + +// Request for updating podcast index ID +#[derive(Deserialize)] +pub struct UpdatePodcastIndexIdRequest { + pub user_id: i32, + pub podcast_id: i32, + pub podcast_index_id: i32, +} + +// Update a podcast's podcast_index_id +pub async fn update_podcast_index_id( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Check if it's web key or user's own key + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + if key_id == request.user_id || is_web_key { + state.db_pool.update_podcast_index_id( + request.user_id, + request.podcast_id, + request.podcast_index_id + ).await?; + + Ok(Json(serde_json::json!({ + "detail": "Podcast index ID updated successfully" + }))) + } else { + Err(AppError::forbidden("You can only update your own podcasts")) + } +} + +// Request for ignoring a podcast index ID +#[derive(Deserialize)] +pub struct IgnorePodcastIndexIdRequest { + pub user_id: i32, + pub podcast_id: i32, + pub ignore: bool, +} + +#[derive(Deserialize)] +pub struct GetIgnoredPodcastsRequest { + pub user_id: i32, +} + +// Ignore/unignore a podcast's index ID requirement +pub async fn ignore_podcast_index_id( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, 
AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Check if it's web key or user's own key + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + if key_id == request.user_id || is_web_key { + state.db_pool.ignore_podcast_index_id( + request.user_id, + request.podcast_id, + request.ignore + ).await?; + + let action = if request.ignore { "ignored" } else { "unignored" }; + Ok(Json(serde_json::json!({ + "detail": format!("Podcast index ID requirement {}", action) + }))) + } else { + Err(AppError::forbidden("You can only update your own podcasts")) + } +} + +// Get podcasts that are ignored from podcast index matching +pub async fn get_ignored_podcasts( + headers: HeaderMap, + State(state): State, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + + // Verify API key + let is_valid = state.db_pool.verify_api_key(&api_key).await?; + if !is_valid { + return Err(AppError::unauthorized("Invalid API key")); + } + + // Check if it's web key or user's own key + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + + if key_id == request.user_id || is_web_key { + let podcasts = state.db_pool.get_ignored_podcasts(request.user_id).await?; + + Ok(Json(serde_json::json!({ + "podcasts": podcasts + }))) + } else { + Err(AppError::forbidden("You can only view your own podcasts")) + } +} + +// Get user's language preference +pub async fn get_user_language( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + let user_id: i32 = params + .get("user_id") + .ok_or_else(|| 
AppError::bad_request("Missing user_id parameter"))? + .parse() + .map_err(|_| AppError::bad_request("Invalid user_id format"))?; + + check_user_access(&state, &api_key, user_id).await?; + + let language = state.db_pool.get_user_language(user_id).await?; + + Ok(Json(UserLanguageResponse { language })) +} + +// Update user's language preference +pub async fn update_user_language( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + check_user_access(&state, &api_key, request.user_id).await?; + + let success = state.db_pool.update_user_language(request.user_id, &request.language).await?; + + if success { + Ok(Json(serde_json::json!({ + "success": true, + "language": request.language + }))) + } else { + Err(AppError::not_found("User not found")) + } +} + +// Get available languages by scanning translation files +pub async fn get_available_languages() -> Result, AppError> { + let translations_dir = std::path::Path::new("/var/www/html/static/translations"); + + let mut languages = Vec::new(); + + if let Ok(entries) = std::fs::read_dir(translations_dir) { + for entry in entries.flatten() { + if let Some(file_name) = entry.file_name().to_str() { + if file_name.ends_with(".json") { + let lang_code = file_name.strip_suffix(".json").unwrap_or(""); + + // Map language codes to human-readable names + let lang_name = match lang_code { + "en" => "English", + "ar" => "العربية", + "be" => "Беларуская", + "bg" => "Български", + "bn" => "বাংলা", + "ca" => "Català", + "cs" => "Čeština", + "da" => "Dansk", + "de" => "Deutsch", + "es" => "Español", + "et" => "Eesti", + "eu" => "Euskera", + "fa" => "فارسی", + "fi" => "Suomi", + "fr" => "Français", + "gu" => "ગુજરાતી", + "he" => "עברית", + "hi" => "हिन्दी", + "hr" => "Hrvatski", + "hu" => "Magyar", + "it" => "Italiano", + "ja" => "日本語", + "ko" => "한국어", + "lt" => "Lietuvių", + "nb" => "Norsk Bokmål", + 
"nl" => "Nederlands", + "pl" => "Polski", + "pt" => "Português", + "pt-BR" => "Português (Brasil)", + "ro" => "Română", + "ru" => "Русский", + "sk" => "Slovenčina", + "sl" => "Slovenščina", + "sv" => "Svenska", + "tr" => "Türkçe", + "uk" => "Українська", + "vi" => "Tiếng Việt", + "zh" => "中文", + "zh-Hans" => "中文 (简体)", + "zh-Hant" => "中文 (繁體)", + "test" => "Test Language", + _ => lang_code, // Fallback to code if name not mapped + }; + + // Validate that the translation file contains valid JSON + if let Ok(content) = std::fs::read_to_string(entry.path()) { + if serde_json::from_str::(&content).is_ok() { + languages.push(AvailableLanguage { + code: lang_code.to_string(), + name: lang_name.to_string(), + }); + } + } + } + } + } + } + + // Sort by language code for consistent ordering + languages.sort_by(|a, b| a.code.cmp(&b.code)); + + // Ensure English is always first if present + if let Some(en_index) = languages.iter().position(|l| l.code == "en") { + if en_index != 0 { + let en_lang = languages.remove(en_index); + languages.insert(0, en_lang); + } + } + + Ok(Json(AvailableLanguagesResponse { languages })) +} + +// Get server default language (no authentication required) +pub async fn get_server_default_language() -> Result, AppError> { + // Get default language from environment variable, fallback to 'en' + let default_language = std::env::var("DEFAULT_LANGUAGE").unwrap_or_else(|_| "en".to_string()); + + // Validate language code (basic validation) + let default_language = if default_language.len() > 10 || default_language.is_empty() { + "en" + } else { + &default_language + }; + + Ok(Json(serde_json::json!({ + "default_language": default_language + }))) +} + +// Request struct for set_global_podcast_cover_preference - matches playback speed pattern +#[derive(Deserialize)] +pub struct SetGlobalPodcastCoverPreference { + pub user_id: i32, + pub use_podcast_covers: bool, + pub podcast_id: Option, +} + +// Set global podcast cover preference - matches Python 
api_set_global_podcast_cover_preference function +pub async fn set_global_podcast_cover_preference( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check authorization - web key or user can only set their own preference + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if key_id != request.user_id && !is_web_key { + return Err(AppError::forbidden("You can only modify your own settings.")); + } + + // If podcast_id is provided, set per-podcast preference; otherwise set global preference + if let Some(podcast_id) = request.podcast_id { + state.db_pool.set_podcast_cover_preference(request.user_id, podcast_id, request.use_podcast_covers).await?; + Ok(Json(serde_json::json!({ "detail": "Podcast cover preference updated." }))) + } else { + state.db_pool.set_global_podcast_cover_preference(request.user_id, request.use_podcast_covers).await?; + Ok(Json(serde_json::json!({ "detail": "Global podcast cover preference updated." 
}))) + } +} + +// Request struct for set_podcast_cover_preference - matches podcast playback speed pattern +#[derive(Deserialize)] +pub struct SetPodcastCoverPreference { + pub user_id: i32, + pub podcast_id: i32, + pub use_podcast_covers: bool, +} + +// Set podcast cover preference - matches Python api_set_podcast_cover_preference function +pub async fn set_podcast_cover_preference( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check authorization - web key or user can only modify their own podcasts + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if key_id != request.user_id && !is_web_key { + return Err(AppError::forbidden("You can only modify your own podcasts.")); + } + + state.db_pool.set_podcast_cover_preference(request.user_id, request.podcast_id, request.use_podcast_covers).await?; + + Ok(Json(serde_json::json!({ "detail": "Podcast cover preference updated." 
}))) +} + +// Request struct for clear_podcast_cover_preference - matches clear playback speed pattern +#[derive(Deserialize)] +pub struct ClearPodcastCoverPreference { + pub user_id: i32, + pub podcast_id: i32, +} + +// Clear podcast cover preference - matches Python api_clear_podcast_cover_preference function +pub async fn clear_podcast_cover_preference( + State(state): State, + headers: HeaderMap, + Json(request): Json, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + // Check authorization - web key or user can only modify their own podcasts + let key_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; + let is_web_key = state.db_pool.is_web_key(&api_key).await?; + + if key_id != request.user_id && !is_web_key { + return Err(AppError::forbidden("You can only modify your own podcasts.")); + } + + state.db_pool.clear_podcast_cover_preference(request.user_id, request.podcast_id).await?; + + Ok(Json(serde_json::json!({ "detail": "Podcast cover preference cleared." }))) +} + +// Get global podcast cover preference +pub async fn get_global_podcast_cover_preference( + State(state): State, + headers: HeaderMap, + Query(params): Query>, +) -> Result, AppError> { + let api_key = extract_api_key(&headers)?; + validate_api_key(&state, &api_key).await?; + + let user_id: i32 = params + .get("user_id") + .ok_or_else(|| AppError::bad_request("Missing user_id parameter"))? 
+ .parse() + .map_err(|_| AppError::bad_request("Invalid user_id format"))?; + + // Check authorization - users can only access their own settings + let user_id_from_api_key = state.db_pool.get_user_id_from_api_key(&api_key).await?; + if user_id_from_api_key != user_id { + return Err(AppError::forbidden("You can only access your own settings.")); + } + + // If podcast_id is provided, get per-podcast preference; otherwise get global preference + let use_podcast_covers = if let Some(podcast_id_str) = params.get("podcast_id") { + let podcast_id: i32 = podcast_id_str + .parse() + .map_err(|_| AppError::bad_request("Invalid podcast_id format"))?; + + let per_podcast_preference = state.db_pool.get_podcast_cover_preference(user_id, podcast_id).await?; + + // If no per-podcast preference is set, fall back to global preference + match per_podcast_preference { + Some(preference) => preference, + None => state.db_pool.get_global_podcast_cover_preference(user_id).await?, + } + } else { + state.db_pool.get_global_podcast_cover_preference(user_id).await? 
+ }; + + Ok(Json(serde_json::json!({ + "use_podcast_covers": use_podcast_covers + }))) +} + diff --git a/rust-api/src/handlers/sync.rs b/rust-api/src/handlers/sync.rs index 383e57f8..ea44e4e2 100644 --- a/rust-api/src/handlers/sync.rs +++ b/rust-api/src/handlers/sync.rs @@ -16,20 +16,29 @@ pub struct UpdateGpodderSyncRequest { pub enabled: bool, } -// Set default gPodder device - matches Python set_default_device function exactly +#[derive(Debug, Deserialize)] +pub struct RemoveSyncRequest { + pub user_id: i32, +} + +// Set default gPodder device - accepts device name for frontend compatibility pub async fn gpodder_set_default( State(state): State, - Path(device_id): Path, + Path(device_name): Path, headers: HeaderMap, ) -> Result, AppError> { let api_key = extract_api_key(&headers)?; validate_api_key(&state, &api_key).await?; let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let success = state.db_pool.gpodder_set_default_device(user_id, device_id).await?; + let success = state.db_pool.gpodder_set_default_device_by_name(user_id, &device_name).await?; if success { - Ok(Json(serde_json::json!({ "status": "success" }))) + Ok(Json(serde_json::json!({ + "success": true, + "message": "Default device set successfully", + "data": null + }))) } else { Err(AppError::internal("Failed to set default device")) } @@ -69,7 +78,7 @@ pub async fn gpodder_get_all_devices( Ok(Json(serde_json::json!(devices))) } -// Force sync gPodder - matches Python force_sync function exactly +// Force sync gPodder - performs initial full sync without timestamps (like setup) pub async fn gpodder_force_sync( State(state): State, headers: HeaderMap, @@ -78,16 +87,85 @@ pub async fn gpodder_force_sync( validate_api_key(&state, &api_key).await?; let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let success = state.db_pool.gpodder_force_sync(user_id).await?; + + // Get user's sync settings to determine which sync method to use + let sync_settings = 
state.db_pool.get_user_sync_settings(user_id).await?; + if sync_settings.is_none() { + return Ok(Json(serde_json::json!({ + "success": false, + "message": "No sync configured for this user", + "data": null + }))); + } + + let settings = sync_settings.unwrap(); + let device_name = state.db_pool.get_or_create_default_device(user_id).await?; + + // Perform initial full sync (without timestamps) based on sync type + let sync_result = match settings.sync_type.as_str() { + "gpodder" => { + // Internal gPodder API - call initial full sync + state.db_pool.call_gpodder_initial_full_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name).await + } + "nextcloud" => { + // Nextcloud initial sync + state.db_pool.call_nextcloud_initial_full_sync(user_id, &settings.url, &settings.username, &settings.token).await + } + "external" => { + // External gPodder server - decrypt token first then call initial full sync + let decrypted_token = state.db_pool.decrypt_password(&settings.token).await.unwrap_or_default(); + state.db_pool.call_gpodder_initial_full_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name).await + } + "both" => { + // Both internal and external - call initial sync for both + let internal_result = state.db_pool.call_gpodder_initial_full_sync(user_id, "http://localhost:8042", &settings.username, &settings.token, &device_name).await; + let decrypted_token = state.db_pool.decrypt_password(&settings.token).await.unwrap_or_default(); + let external_result = state.db_pool.call_gpodder_initial_full_sync(user_id, &settings.url, &settings.username, &decrypted_token, &device_name).await; + + match (internal_result, external_result) { + (Ok(internal_success), Ok(external_success)) => Ok(internal_success || external_success), + (Ok(internal_success), Err(external_err)) => { + tracing::warn!("External sync failed: {}, but internal sync succeeded: {}", external_err, internal_success); + Ok(internal_success) + } + 
(Err(internal_err), Ok(external_success)) => { + tracing::warn!("Internal sync failed: {}, but external sync succeeded: {}", internal_err, external_success); + Ok(external_success) + } + (Err(internal_err), Err(external_err)) => { + tracing::error!("Both internal and external sync failed: internal={}, external={}", internal_err, external_err); + Err(internal_err) + } + } + } + _ => Ok(false) + }; + + let (success, error_message) = match sync_result { + Ok(result) => (result, None), + Err(e) => { + tracing::error!("Sync failed with error: {}", e); + (false, Some(e.to_string())) + } + }; if success { - Ok(Json(serde_json::json!({ "status": "success" }))) + Ok(Json(serde_json::json!({ + "success": true, + "message": "Initial sync completed successfully - all data refreshed", + "data": null + }))) } else { - Err(AppError::internal("Failed to force sync")) + let message = error_message.unwrap_or_else(|| "Initial sync failed - please check your sync configuration".to_string()); + Ok(Json(serde_json::json!({ + "success": false, + "message": format!("Initial sync failed: {}", message), + "data": null + }))) } } -// Regular gPodder sync - matches Python sync function exactly +// Regular gPodder sync - performs standard incremental sync with timestamps (like tasks.rs) pub async fn gpodder_sync( State(state): State, headers: HeaderMap, @@ -96,13 +174,23 @@ pub async fn gpodder_sync( validate_api_key(&state, &api_key).await?; let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let sync_result = state.db_pool.gpodder_sync(user_id).await?; - Ok(Json(serde_json::json!({ - "status": "success", - "synced_podcasts": sync_result.synced_podcasts, - "synced_episodes": sync_result.synced_episodes - }))) + // Use the same sync process as the scheduler (tasks.rs) which uses proper API calls with timestamps + let sync_result = state.db_pool.refresh_gpodder_subscription_background(user_id).await?; + + if sync_result { + Ok(Json(serde_json::json!({ + "success": true, + 
"message": "Sync completed successfully", + "data": null + }))) + } else { + Ok(Json(serde_json::json!({ + "success": false, + "message": "Sync failed or no changes detected - check your sync configuration", + "data": null + }))) + } } // Get gPodder status - matches Python get_gpodder_status function exactly @@ -118,7 +206,7 @@ pub async fn gpodder_status( Ok(Json(serde_json::json!({ "sync_type": status.sync_type, - "gpodder_enabled": status.sync_type == "gpodder" || status.sync_type == "both", + "gpodder_enabled": status.sync_type == "gpodder" || status.sync_type == "both" || status.sync_type == "external", "external_enabled": status.sync_type == "external" || status.sync_type == "both", "external_url": status.gpodder_url, "api_url": "http://localhost:8042" @@ -241,8 +329,38 @@ pub async fn gpodder_test_connection( return Err(AppError::forbidden("You can only test connections for yourself!")); } - let verified = state.db_pool.verify_gpodder_auth(gpodder_url, gpodder_username, gpodder_password).await?; - Ok(Json(serde_json::json!({ "verified": verified }))) + // Direct HTTP call to match Python implementation exactly + let client = reqwest::Client::new(); + let auth_url = format!("{}/api/2/auth/{}/login.json", + gpodder_url.trim_end_matches('/'), + gpodder_username); + + let verified = match client + .post(&auth_url) + .basic_auth(gpodder_username, Some(gpodder_password)) + .send() + .await + { + Ok(response) => response.status().is_success(), + Err(_) => false, + }; + + if verified { + Ok(Json(serde_json::json!({ + "success": true, + "message": "Successfully connected to GPodder server and verified access.", + "data": { + "auth_type": "session", + "has_devices": true + } + }))) + } else { + Ok(Json(serde_json::json!({ + "success": false, + "message": "Failed to connect to GPodder server", + "data": null + }))) + } } // Get default gPodder device - matches Python get_default_device function exactly @@ -276,18 +394,34 @@ pub async fn gpodder_create_device( 
validate_api_key(&state, &api_key).await?; let user_id = state.db_pool.get_user_id_from_api_key(&api_key).await?; - let device_id = state.db_pool.gpodder_create_device_with_caption( - user_id, - &request.device_name, - &request.device_type, - request.device_caption.as_deref(), - false - ).await?; + // Get user's GPodder sync settings + let settings = state.db_pool.get_user_sync_settings(user_id).await? + .ok_or_else(|| AppError::BadRequest("User not found or GPodder sync not configured".to_string()))?; + + // Validate that GPodder sync is enabled + if settings.sync_type != "gpodder" && settings.sync_type != "both" && settings.sync_type != "external" { + return Err(AppError::BadRequest("GPodder sync is not enabled for this user".to_string())); + } + + // Create device via GPodder API (uses proper auth for internal/external) + let device_id = state.db_pool.create_device_via_gpodder_api( + &settings.url, + &settings.username, + &settings.token, + &request.device_name + ).await.map_err(|e| AppError::Internal(format!("Failed to create device via GPodder API: {}", e)))?; + + // Return GPodder API standard format Ok(Json(serde_json::json!({ - "status": "success", - "device_id": device_id, - "device_name": request.device_name + "id": device_id, // GPodder device ID (string) + "name": request.device_name, + "type": request.device_type, + "caption": request.device_caption.unwrap_or_else(|| request.device_name.clone()), + "last_sync": Option::::None, + "is_active": true, + "is_remote": true, + "is_default": false }))) } diff --git a/rust-api/src/handlers/tasks.rs b/rust-api/src/handlers/tasks.rs index 4cc94025..e303bbc7 100644 --- a/rust-api/src/handlers/tasks.rs +++ b/rust-api/src/handlers/tasks.rs @@ -3,7 +3,7 @@ use axum::{ http::HeaderMap, response::Json, }; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use serde_json; use crate::{ @@ -36,6 +36,9 @@ pub async fn startup_tasks( // Execute the startup tasks state.db_pool.add_news_feed_if_not_added().await?; 
+ + // Create default playlists for any users that might be missing them + state.db_pool.create_missing_default_playlists().await?; Ok(Json(serde_json::json!({"status": "Startup tasks completed successfully."}))) } diff --git a/rust-api/src/handlers/users.rs b/rust-api/src/handlers/users.rs deleted file mode 100644 index b3651f92..00000000 --- a/rust-api/src/handlers/users.rs +++ /dev/null @@ -1,10 +0,0 @@ -use axum::{extract::State, http::HeaderMap, response::Json}; -use serde_json::json; -use crate::{ - error::AppResult, - handlers::{extract_api_key, validate_api_key}, - AppState, -}; - -// User endpoints will be implemented here to match clientapi.py -// Examples: get_user_settings, update_user_settings, etc. \ No newline at end of file diff --git a/rust-api/src/handlers/websocket.rs b/rust-api/src/handlers/websocket.rs index 0ce2f0c9..dd12075f 100644 --- a/rust-api/src/handlers/websocket.rs +++ b/rust-api/src/handlers/websocket.rs @@ -9,8 +9,7 @@ use futures::{sink::SinkExt, stream::StreamExt}; use std::{collections::HashMap, sync::Arc}; use tokio::sync::{broadcast, RwLock}; use crate::{ - error::AppResult, - services::task_manager::{TaskManager, TaskUpdate, WebSocketMessage}, + services::task_manager::{TaskUpdate, WebSocketMessage}, AppState, }; @@ -68,20 +67,22 @@ pub async fn task_progress_websocket( // Validate API key before upgrading websocket match state.db_pool.verify_api_key(&query.api_key).await { Ok(true) => { - // Also verify the API key belongs to this user or is a web key + // Verify the API key belongs to this user (or system user for background tasks) match state.db_pool.get_user_id_from_api_key(&query.api_key).await { Ok(key_user_id) => { - let is_web_key = state.db_pool.is_web_key(&query.api_key).await.unwrap_or(false); - if key_user_id == user_id || is_web_key { + // Allow access if API key matches the user or if it's the system user (ID 1) + if key_user_id == user_id || key_user_id == 1 { ws.on_upgrade(move |socket| 
handle_task_progress_socket(socket, user_id, state)) } else { + tracing::warn!("WebSocket auth failed: API key user {} tried to access user {} tasks", key_user_id, user_id); axum::response::Response::builder() .status(403) - .body("Unauthorized".into()) + .body("Unauthorized - API key does not belong to requested user".into()) .unwrap() } } - Err(_) => { + Err(e) => { + tracing::error!("WebSocket auth error getting user ID from API key: {}", e); axum::response::Response::builder() .status(403) .body("Invalid API key".into()) @@ -89,7 +90,8 @@ pub async fn task_progress_websocket( } } } - _ => { + Ok(false) | Err(_) => { + tracing::warn!("WebSocket auth failed: Invalid API key"); axum::response::Response::builder() .status(403) .body("Invalid API key".into()) diff --git a/rust-api/src/handlers/youtube.rs b/rust-api/src/handlers/youtube.rs index c2687e74..861a6a16 100644 --- a/rust-api/src/handlers/youtube.rs +++ b/rust-api/src/handlers/youtube.rs @@ -269,7 +269,7 @@ pub async fn subscribe_to_youtube_channel( } // Helper function to get YouTube channel info using Backend service -async fn get_youtube_channel_info(channel_id: &str) -> Result, AppError> { +pub async fn get_youtube_channel_info(channel_id: &str) -> Result, AppError> { println!("Getting channel info for {} from Backend service", channel_id); // Get Backend URL from environment variable diff --git a/rust-api/src/main.rs b/rust-api/src/main.rs index f7602102..b458cb30 100644 --- a/rust-api/src/main.rs +++ b/rust-api/src/main.rs @@ -1,7 +1,4 @@ use axum::{ - extract::State, - http::StatusCode, - response::Json, routing::{delete, get, post, put}, Router, }; @@ -9,7 +6,6 @@ use std::net::SocketAddr; use tokio::signal; use tower::ServiceBuilder; use tower_http::{ - cors::{Any, CorsLayer}, trace::TraceLayer, compression::CompressionLayer, }; @@ -165,6 +161,7 @@ fn create_app(state: AppState) -> Router { .level(tracing::Level::INFO)) ) .layer(CompressionLayer::new()) + 
.layer(axum::extract::DefaultBodyLimit::max(2 * 1024 * 1024 * 1024)) // 2GB limit for massive backup files ) .with_state(state) } @@ -196,6 +193,10 @@ fn create_data_routes() -> Router { .route("/increment_listen_time/{user_id}", put(handlers::podcasts::increment_listen_time)) .route("/get_playback_speed", post(handlers::podcasts::get_playback_speed)) .route("/add_podcast", post(handlers::podcasts::add_podcast)) + .route("/update_podcast_info", put(handlers::podcasts::update_podcast_info)) + .route("/{podcast_id}/merge", post(handlers::podcasts::merge_podcasts)) + .route("/{podcast_id}/unmerge/{target_podcast_id}", post(handlers::podcasts::unmerge_podcast)) + .route("/{podcast_id}/merged", get(handlers::podcasts::get_merged_podcasts)) .route("/remove_podcast", post(handlers::podcasts::remove_podcast)) .route("/remove_podcast_id", post(handlers::podcasts::remove_podcast_id)) .route("/remove_podcast_name", post(handlers::podcasts::remove_podcast_by_name)) @@ -220,6 +221,7 @@ fn create_data_routes() -> Router { .route("/download_status/{user_id}", get(handlers::podcasts::download_status)) .route("/podcast_episodes", get(handlers::podcasts::podcast_episodes)) .route("/get_podcast_id_from_ep_name", get(handlers::podcasts::get_podcast_id_from_ep_name)) + .route("/get_episode_id_ep_name", get(handlers::podcasts::get_episode_id_ep_name)) .route("/get_episode_metadata", post(handlers::podcasts::get_episode_metadata)) .route("/fetch_podcasting_2_data", get(handlers::podcasts::fetch_podcasting_2_data)) .route("/get_auto_download_status", post(handlers::podcasts::get_auto_download_status)) @@ -227,12 +229,15 @@ fn create_data_routes() -> Router { .route("/get_play_episode_details", post(handlers::podcasts::get_play_episode_details)) .route("/fetch_podcasting_2_pod_data", get(handlers::podcasts::fetch_podcasting_2_pod_data)) .route("/mark_episode_completed", post(handlers::podcasts::mark_episode_completed)) + .route("/update_episode_duration", 
post(handlers::podcasts::update_episode_duration)) // Bulk episode operations .route("/bulk_mark_episodes_completed", post(handlers::episodes::bulk_mark_episodes_completed)) .route("/bulk_save_episodes", post(handlers::episodes::bulk_save_episodes)) .route("/bulk_queue_episodes", post(handlers::episodes::bulk_queue_episodes)) .route("/bulk_download_episodes", post(handlers::episodes::bulk_download_episodes)) .route("/bulk_delete_downloaded_episodes", post(handlers::episodes::bulk_delete_downloaded_episodes)) + .route("/share_episode/{episode_id}", post(handlers::episodes::share_episode)) + .route("/episode_by_url/{url_key}", get(handlers::episodes::get_episode_by_url_key)) .route("/increment_played/{user_id}", put(handlers::podcasts::increment_played)) .route("/record_listen_duration", post(handlers::podcasts::record_listen_duration)) .route("/get_podcast_id_from_ep_id", get(handlers::podcasts::get_podcast_id_from_ep_id)) @@ -243,6 +248,8 @@ fn create_data_routes() -> Router { .route("/home_overview", get(handlers::podcasts::home_overview)) .route("/get_playlists", get(handlers::podcasts::get_playlists)) .route("/get_playlist_episodes", get(handlers::podcasts::get_playlist_episodes)) + .route("/create_playlist", post(handlers::playlists::create_playlist)) + .route("/delete_playlist", delete(handlers::playlists::delete_playlist)) .route("/get_podcast_details", get(handlers::podcasts::get_podcast_details)) .route("/get_podcast_details_dynamic", get(handlers::podcasts::get_podcast_details_dynamic)) .route("/podpeople/host_podcasts", get(handlers::podcasts::get_host_podcasts)) @@ -277,6 +284,8 @@ fn create_data_routes() -> Router { .route("/get_email_settings", get(handlers::settings::get_email_settings)) .route("/send_test_email", post(handlers::settings::send_test_email)) .route("/send_email", post(handlers::settings::send_email)) + .route("/reset_password_create_code", post(handlers::auth::reset_password_create_code)) + .route("/verify_and_reset_password", 
post(handlers::auth::verify_and_reset_password)) .route("/get_api_info/{user_id}", get(handlers::settings::get_api_info)) .route("/create_api_key", post(handlers::settings::create_api_key)) .route("/delete_api_key", delete(handlers::settings::delete_api_key)) @@ -294,7 +303,7 @@ fn create_data_routes() -> Router { .route("/add_gpodder_server", post(handlers::settings::add_gpodder_server)) .route("/get_gpodder_settings/{user_id}", get(handlers::settings::get_gpodder_settings)) .route("/check_gpodder_settings/{user_id}", get(handlers::settings::check_gpodder_settings)) - .route("/remove_podcast_sync", post(handlers::settings::remove_podcast_sync)) + .route("/remove_podcast_sync", delete(handlers::settings::remove_podcast_sync)) .route("/gpodder/status", get(handlers::sync::gpodder_status)) .route("/gpodder/toggle", post(handlers::sync::gpodder_toggle)) .route("/refresh_pods", get(handlers::refresh::refresh_pods_admin)) @@ -308,9 +317,13 @@ fn create_data_routes() -> Router { .route("/user/notification_settings", get(handlers::settings::get_notification_settings)) .route("/user/notification_settings", put(handlers::settings::update_notification_settings)) .route("/user/set_playback_speed", post(handlers::settings::set_playback_speed_user)) + .route("/user/set_global_podcast_cover_preference", post(handlers::settings::set_global_podcast_cover_preference)) + .route("/user/get_podcast_cover_preference", get(handlers::settings::get_global_podcast_cover_preference)) .route("/user/test_notification", post(handlers::settings::test_notification)) .route("/add_oidc_provider", post(handlers::settings::add_oidc_provider)) + .route("/update_oidc_provider/{provider_id}", put(handlers::settings::update_oidc_provider)) .route("/list_oidc_providers", get(handlers::settings::list_oidc_providers)) + .route("/remove_oidc_provider", post(handlers::settings::remove_oidc_provider)) .route("/startpage", get(handlers::settings::get_startpage)) .route("/startpage", 
post(handlers::settings::update_startpage)) .route("/person/subscribe/{user_id}/{person_id}", post(handlers::settings::subscribe_to_person)) @@ -325,10 +338,26 @@ fn create_data_routes() -> Router { .route("/remove_category", post(handlers::settings::remove_category)) .route("/add_category", post(handlers::settings::add_category)) .route("/podcast/set_playback_speed", post(handlers::settings::set_podcast_playback_speed)) + .route("/podcast/set_cover_preference", post(handlers::settings::set_podcast_cover_preference)) + .route("/podcast/clear_cover_preference", post(handlers::settings::clear_podcast_cover_preference)) .route("/podcast/toggle_notifications", put(handlers::settings::toggle_podcast_notifications)) .route("/podcast/notification_status", post(handlers::podcasts::get_notification_status)) .route("/rss_key", get(handlers::settings::get_user_rss_key)) .route("/verify_mfa", post(handlers::settings::verify_mfa)) + .route("/schedule_backup", post(handlers::settings::schedule_backup)) + .route("/get_scheduled_backup", post(handlers::settings::get_scheduled_backup)) + .route("/list_backup_files", post(handlers::settings::list_backup_files)) + .route("/restore_backup_file", post(handlers::settings::restore_from_backup_file)) + .route("/manual_backup_to_directory", post(handlers::settings::manual_backup_to_directory)) + .route("/get_unmatched_podcasts", post(handlers::settings::get_unmatched_podcasts)) + .route("/update_podcast_index_id", post(handlers::settings::update_podcast_index_id)) + .route("/ignore_podcast_index_id", post(handlers::settings::ignore_podcast_index_id)) + .route("/get_ignored_podcasts", post(handlers::settings::get_ignored_podcasts)) + // Language preference endpoints + .route("/get_user_language", get(handlers::settings::get_user_language)) + .route("/update_user_language", put(handlers::settings::update_user_language)) + .route("/get_available_languages", get(handlers::settings::get_available_languages)) + 
.route("/get_server_default_language", get(handlers::settings::get_server_default_language)) // Add more data routes as needed } @@ -339,7 +368,7 @@ fn create_podcast_routes() -> Router { fn create_episode_routes() -> Router { Router::new() - // Add episode routes as needed + .route("/{episode_id}/download", get(handlers::episodes::download_episode_file)) } fn create_playlist_routes() -> Router { diff --git a/rust-api/src/models.rs b/rust-api/src/models.rs index 0d6a98b6..4205b140 100644 --- a/rust-api/src/models.rs +++ b/rust-api/src/models.rs @@ -155,15 +155,39 @@ pub struct UpdateEpisodeRequest { #[derive(Debug, Deserialize)] pub struct CreatePlaylistRequest { + pub user_id: i32, pub name: String, pub description: Option, pub podcast_ids: Option>, - pub include_unplayed: Option, - pub include_partially_played: Option, - pub include_played: Option, + pub include_unplayed: bool, + pub include_partially_played: bool, + pub include_played: bool, + pub play_progress_min: Option, + pub play_progress_max: Option, + pub time_filter_hours: Option, pub min_duration: Option, pub max_duration: Option, + pub sort_order: String, + pub group_by_podcast: bool, pub max_episodes: Option, + pub icon_name: String, +} + +#[derive(Debug, Serialize)] +pub struct CreatePlaylistResponse { + pub detail: String, + pub playlist_id: i32, +} + +#[derive(Debug, Deserialize)] +pub struct DeletePlaylistRequest { + pub user_id: i32, + pub playlist_id: i32, +} + +#[derive(Debug, Serialize)] +pub struct DeletePlaylistResponse { + pub detail: String, } // Search models @@ -193,6 +217,29 @@ pub struct UserStats { pub downloaded_episodes: i32, } +// Language models +#[derive(Debug, Serialize, Deserialize)] +pub struct AvailableLanguage { + pub code: String, + pub name: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct LanguageUpdateRequest { + pub user_id: i32, + pub language: String, +} + +#[derive(Debug, Serialize)] +pub struct UserLanguageResponse { + pub language: String, +} + 
+#[derive(Debug, Serialize)] +pub struct AvailableLanguagesResponse { + pub languages: Vec, +} + // API-specific podcast models to match Python responses #[derive(Debug, Serialize, Deserialize)] pub struct PodcastResponse { @@ -299,6 +346,7 @@ pub struct SavedEpisode { pub queued: bool, pub downloaded: bool, pub is_youtube: bool, + pub podcastid: Option, } #[derive(Debug, Serialize, Deserialize)] @@ -306,6 +354,20 @@ pub struct SavedEpisodesResponse { pub saved_episodes: Vec, } +#[derive(Debug, Serialize, Deserialize)] +pub struct PlaylistInfo { + pub name: String, + pub description: String, + pub episode_count: i32, + pub icon_name: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct PlaylistEpisodesResponse { + pub episodes: Vec, + pub playlist_info: PlaylistInfo, +} + #[derive(Debug, Serialize)] pub struct SaveEpisodeResponse { pub detail: String, diff --git a/rust-api/src/redis_client.rs b/rust-api/src/redis_client.rs index 3569188d..03903497 100644 --- a/rust-api/src/redis_client.rs +++ b/rust-api/src/redis_client.rs @@ -1,6 +1,5 @@ use redis::{aio::MultiplexedConnection, AsyncCommands, Client}; -use std::time::Duration; -use crate::{config::Config, error::{AppError, AppResult}}; +use crate::{config::Config, error::AppResult}; #[derive(Clone)] pub struct RedisClient { diff --git a/rust-api/src/redis_manager.rs b/rust-api/src/redis_manager.rs index 1f89bbeb..ab5028b2 100644 --- a/rust-api/src/redis_manager.rs +++ b/rust-api/src/redis_manager.rs @@ -1,5 +1,5 @@ use serde_json::Value; -use crate::{error::{AppError, AppResult}, redis_client::RedisClient}; +use crate::{error::AppResult, redis_client::RedisClient}; pub struct ImportProgressManager { redis_client: RedisClient, @@ -83,6 +83,7 @@ impl NotificationManager { match platform { "ntfy" => self.send_ntfy_notification(settings).await, "gotify" => self.send_gotify_notification(settings).await, + "http" => self.send_http_notification(settings).await, _ => { println!("Unsupported notification 
platform: {}", platform); Ok(false) @@ -120,7 +121,16 @@ impl NotificationManager { let response = request.send().await?; - Ok(response.status().is_success()) + let status = response.status(); + let is_success = status.is_success(); + + if !is_success { + let response_text = response.text().await.unwrap_or_default(); + println!("Ntfy notification failed with status: {} - Response: {}", + status, response_text); + } + + Ok(is_success) } async fn send_gotify_notification(&self, settings: &serde_json::Value) -> AppResult { @@ -149,4 +159,91 @@ impl NotificationManager { Ok(response.status().is_success()) } + + async fn send_http_notification(&self, settings: &serde_json::Value) -> AppResult { + let http_url = settings.get("http_url").and_then(|v| v.as_str()).unwrap_or(""); + let http_token = settings.get("http_token").and_then(|v| v.as_str()).unwrap_or(""); + let http_method = settings.get("http_method").and_then(|v| v.as_str()).unwrap_or("POST"); + + if http_url.is_empty() { + println!("HTTP URL is empty, cannot send notification"); + return Ok(false); + } + + let client = reqwest::Client::new(); + + // Build the request based on method + let request_builder = match http_method.to_uppercase().as_str() { + "GET" => { + // For GET requests, add message as query parameter + let url_with_params = if http_url.contains('?') { + format!("{}&message={}", http_url, urlencoding::encode("Test notification from PinePods")) + } else { + format!("{}?message={}", http_url, urlencoding::encode("Test notification from PinePods")) + }; + client.get(&url_with_params) + }, + "POST" | _ => { + // For POST requests, send JSON payload + let payload = if http_url.contains("api.telegram.org") { + // Special handling for Telegram Bot API + let chat_id = if let Some(chat_id_str) = http_token.split(':').nth(1) { + // Extract chat_id from token if it contains chat_id (format: bot_token:chat_id) + chat_id_str + } else { + // Default chat_id - user needs to configure this properly + "YOUR_CHAT_ID" 
+ }; + + serde_json::json!({ + "chat_id": chat_id, + "text": "Test notification from PinePods" + }) + } else { + // Generic JSON payload + serde_json::json!({ + "title": "PinePods Test", + "message": "Test notification from PinePods", + "text": "Test notification from PinePods" + }) + }; + + client.post(http_url) + .header("Content-Type", "application/json") + .json(&payload) + } + }; + + // Add authorization header if token is provided + let request_builder = if !http_token.is_empty() { + if http_url.contains("api.telegram.org") { + // For Telegram, token goes in URL path, not header + request_builder + } else { + // For other services, add as Bearer token + request_builder.header("Authorization", format!("Bearer {}", http_token)) + } + } else { + request_builder + }; + + match request_builder.send().await { + Ok(response) => { + let status = response.status(); + let is_success = status.is_success(); + + if !is_success { + let response_text = response.text().await.unwrap_or_default(); + println!("HTTP notification failed with status: {} - Response: {}", + status, response_text); + } + + Ok(is_success) + }, + Err(e) => { + println!("HTTP notification request failed: {}", e); + Ok(false) + } + } + } } \ No newline at end of file diff --git a/rust-api/src/services/auth.rs b/rust-api/src/services/auth.rs index 8faddebe..15f403e1 100644 --- a/rust-api/src/services/auth.rs +++ b/rust-api/src/services/auth.rs @@ -12,21 +12,4 @@ pub fn verify_password(password: &str, stored_hash: &str) -> AppResult { Ok(()) => Ok(true), Err(_) => Ok(false), } -} - -/// Hash password using Argon2 - matches Python's passlib CryptContext -pub fn hash_password(password: &str) -> AppResult { - use argon2::{PasswordHasher, password_hash::SaltString}; - use rand::Rng; - - let argon2 = Argon2::default(); - let mut salt_bytes = [0u8; 32]; - rand::rng().fill(&mut salt_bytes); - let salt = SaltString::encode_b64(&salt_bytes) - .map_err(|e| AppError::Auth(format!("Failed to create salt: {}", e)))?; - 
- let password_hash = argon2.hash_password(password.as_bytes(), &salt) - .map_err(|e| AppError::Auth(format!("Failed to hash password: {}", e)))?; - - Ok(password_hash.to_string()) } \ No newline at end of file diff --git a/rust-api/src/services/feeds.rs b/rust-api/src/services/feeds.rs deleted file mode 100644 index 0b96fe7c..00000000 --- a/rust-api/src/services/feeds.rs +++ /dev/null @@ -1,23 +0,0 @@ -use feed_rs::parser; -use reqwest::Client; -use crate::error::{AppError, AppResult}; - -/// RSS feed fetching and parsing - will replace Python's feedparser -pub async fn fetch_and_parse_feed(url: &str) -> AppResult { - let client = Client::new(); - - let response = client - .get(url) - .header("User-Agent", "PinePods/1.0") - .send() - .await?; - - if !response.status().is_success() { - return Err(AppError::FeedParsing(format!("HTTP error: {}", response.status()))); - } - - let content = response.bytes().await?; - - parser::parse(&content[..]) - .map_err(|e| AppError::FeedParsing(format!("Feed parsing error: {}", e))) -} \ No newline at end of file diff --git a/rust-api/src/services/mod.rs b/rust-api/src/services/mod.rs index dd429b0e..8d7b8d77 100644 --- a/rust-api/src/services/mod.rs +++ b/rust-api/src/services/mod.rs @@ -1,5 +1,4 @@ pub mod auth; -pub mod feeds; pub mod podcast; pub mod scheduler; pub mod task_manager; diff --git a/rust-api/src/services/podcast.rs b/rust-api/src/services/podcast.rs index 031fe552..bd242168 100644 --- a/rust-api/src/services/podcast.rs +++ b/rust-api/src/services/podcast.rs @@ -41,7 +41,7 @@ async fn refresh_podcast_internal(db_pool: &DatabasePool, podcast_id: i32) -> Ap let episodes = db_pool.add_episodes( podcast_id, &podcast.feed_url, - &podcast.artwork_url, + podcast.artwork_url.as_deref().unwrap_or(""), podcast.auto_download, podcast.username.as_deref(), podcast.password.as_deref(), @@ -112,7 +112,7 @@ async fn refresh_single_podcast(db_pool: &DatabasePool, podcast: &PodcastForRefr db_pool.add_episodes( podcast.id, 
&podcast.feed_url, - &podcast.artwork_url, + podcast.artwork_url.as_deref().unwrap_or(""), podcast.auto_download, podcast.username.as_deref(), podcast.password.as_deref(), @@ -188,7 +188,7 @@ async fn get_podcast_for_refresh(db_pool: &DatabasePool, podcast_id: i32) -> App id: row.try_get("PodcastID")?, name: "".to_string(), // Not needed for refresh feed_url: row.try_get("FeedURL")?, - artwork_url: row.try_get("ArtworkURL").unwrap_or_default(), + artwork_url: row.try_get::, _>("ArtworkURL").unwrap_or_default(), auto_download: row.try_get("AutoDownload")?, username: row.try_get("Username").ok(), password: row.try_get("Password").ok(), @@ -217,7 +217,7 @@ async fn get_podcast_for_refresh(db_pool: &DatabasePool, podcast_id: i32) -> App id: row.try_get("PodcastID")?, name: "".to_string(), // Not needed for refresh feed_url: row.try_get("FeedURL")?, - artwork_url: row.try_get("ArtworkURL").unwrap_or_default(), + artwork_url: row.try_get::, _>("ArtworkURL").unwrap_or_default(), auto_download: row.try_get("AutoDownload")?, username: row.try_get("Username").ok(), password: row.try_get("Password").ok(), @@ -251,7 +251,7 @@ async fn get_all_podcasts_for_refresh(db_pool: &DatabasePool) -> AppResult, _>("ArtworkURL").unwrap_or_default(), auto_download: row.try_get("AutoDownload")?, username: row.try_get("Username").ok(), password: row.try_get("Password").ok(), @@ -278,7 +278,7 @@ async fn get_all_podcasts_for_refresh(db_pool: &DatabasePool) -> AppResult, _>("ArtworkURL").unwrap_or_default(), auto_download: row.try_get("AutoDownload")?, username: row.try_get("Username").ok(), password: row.try_get("Password").ok(), diff --git a/rust-api/src/services/scheduler.rs b/rust-api/src/services/scheduler.rs index 9097fd25..15896b9e 100644 --- a/rust-api/src/services/scheduler.rs +++ b/rust-api/src/services/scheduler.rs @@ -1,6 +1,5 @@ use crate::{ error::AppResult, - database::DatabasePool, handlers::{refresh, tasks}, AppState, }; @@ -75,13 +74,6 @@ impl BackgroundScheduler { Ok(()) } - 
pub async fn stop(&mut self) -> AppResult<()> { - info!("🛑 Stopping background task scheduler..."); - self.scheduler.shutdown().await?; - info!("✅ Background task scheduler stopped"); - Ok(()) - } - // Direct function calls instead of HTTP requests async fn run_refresh_pods(state: Arc) -> AppResult<()> { // Call refresh_pods function directly @@ -99,9 +91,9 @@ impl BackgroundScheduler { warn!("⚠️ Nextcloud sync failed during scheduled refresh: {}", e); } - // Also run playlist updates - if let Err(e) = tasks::update_playlists_internal(&state).await { - warn!("⚠️ Playlist update failed during scheduled refresh: {}", e); + // Update playlist episode counts (replaces complex playlist content updates) + if let Err(e) = state.db_pool.update_playlist_episode_counts().await { + warn!("⚠️ Playlist episode count update failed during scheduled refresh: {}", e); } } Err(e) => { @@ -144,6 +136,16 @@ impl BackgroundScheduler { pub async fn run_startup_tasks(state: Arc) -> AppResult<()> { info!("🚀 Running initial startup tasks..."); + // Initialize OIDC provider from environment variables if configured + if let Err(e) = state.db_pool.init_oidc_from_env(&state.config.oidc).await { + warn!("⚠️ OIDC initialization failed: {}", e); + } + + // Create missing default playlists for existing users + if let Err(e) = state.db_pool.create_missing_default_playlists().await { + warn!("⚠️ Creating missing default playlists failed: {}", e); + } + // Run an immediate refresh to ensure data is current on startup if let Err(e) = Self::run_refresh_pods(state.clone()).await { warn!("⚠️ Initial startup refresh failed: {}", e); diff --git a/rust-api/src/services/task_manager.rs b/rust-api/src/services/task_manager.rs index 6eb0e54d..2352c69b 100644 --- a/rust-api/src/services/task_manager.rs +++ b/rust-api/src/services/task_manager.rs @@ -1,8 +1,7 @@ use crate::{error::AppResult, redis_client::RedisClient}; use redis::AsyncCommands; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; 
-use tokio::sync::{broadcast, RwLock}; +use tokio::sync::broadcast; use uuid::Uuid; #[derive(Debug, Clone, Serialize, Deserialize)] diff --git a/rust-api/src/services/tasks.rs b/rust-api/src/services/tasks.rs index e61215f2..ff5e88da 100644 --- a/rust-api/src/services/tasks.rs +++ b/rust-api/src/services/tasks.rs @@ -1,12 +1,11 @@ use crate::{ - error::{AppError, AppResult}, - services::task_manager::{TaskManager, TaskInfo}, + error::AppResult, + services::task_manager::TaskManager, database::DatabasePool, }; use futures::Future; use serde_json::Value; use std::sync::Arc; -use tokio::task::JoinHandle; use sqlx::Row; // New function that actually downloads an episode and waits for completion @@ -77,6 +76,15 @@ async fn download_episode_and_wait( if !download_dir.exists() { std::fs::create_dir_all(&download_dir) .map_err(|e| crate::error::AppError::Internal(format!("Failed to create download directory: {}", e)))?; + + // Set ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set directory ownership (ignore errors for NFS mounts) + let _ = std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), download_dir.to_string_lossy().to_string()]) + .output(); } let pub_date_str = if let Some(date) = pub_date { @@ -108,6 +116,18 @@ async fn download_episode_and_wait( .map_err(|e| crate::error::AppError::Internal(format!("Failed to write file: {}", e)))?; } + // Close the file before setting ownership + drop(file); + + // Set file ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set file ownership (ignore errors for NFS mounts) + let _ = 
std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), file_path.to_string_lossy().to_string()]) + .output(); + // Record download in database let file_size = tokio::fs::metadata(&file_path).await .map(|m| m.len() as i64) @@ -244,109 +264,6 @@ impl ProgressReporter for TaskProgressReporter { } } -// Example task implementations - -pub async fn download_episode_task( - episode_id: String, - url: String, - reporter: Arc, -) -> AppResult { - reporter - .update_progress(10.0, Some("Starting download...".to_string())) - .await?; - - // Simulate download progress - for i in 1..=9 { - tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - let progress = 10.0 + (i as f64 * 10.0); - reporter - .update_progress( - progress, - Some(format!("Downloading... {}%", progress as u32)), - ) - .await?; - } - - reporter - .update_progress(100.0, Some("Download completed".to_string())) - .await?; - - Ok(serde_json::json!({ - "episode_id": episode_id, - "url": url, - "file_path": "/downloads/episode.mp3" - })) -} - -pub async fn import_opml_task( - opml_content: String, - reporter: Arc, -) -> AppResult { - reporter - .update_progress(5.0, Some("Parsing OPML...".to_string())) - .await?; - - // Simulate OPML parsing and processing - let feed_count = 10; // Would parse OPML to get actual count - - for i in 1..=feed_count { - let progress = 5.0 + ((i as f64 / feed_count as f64) * 90.0); - reporter - .update_progress( - progress, - Some(format!("Processing feed {} of {}", i, feed_count)), - ) - .await?; - - // Simulate processing each feed - tokio::time::sleep(tokio::time::Duration::from_millis(200)).await; - } - - reporter - .update_progress(100.0, Some("OPML import completed".to_string())) - .await?; - - Ok(serde_json::json!({ - "imported_feeds": feed_count, - "success": true - })) -} - -pub async fn refresh_all_feeds_task( - user_id: i32, - reporter: Arc, -) -> AppResult { - reporter - .update_progress(5.0, Some("Fetching user podcasts...".to_string())) 
- .await?; - - // Simulate fetching podcasts from database - let podcast_count = 25; // Would fetch from DB - - for i in 1..=podcast_count { - let progress = 5.0 + ((i as f64 / podcast_count as f64) * 90.0); - reporter - .update_progress( - progress, - Some(format!("Refreshing podcast {} of {}", i, podcast_count)), - ) - .await?; - - // Simulate refreshing each podcast - tokio::time::sleep(tokio::time::Duration::from_millis(100)).await; - } - - reporter - .update_progress(100.0, Some("All feeds refreshed".to_string())) - .await?; - - Ok(serde_json::json!({ - "user_id": user_id, - "refreshed_count": podcast_count, - "success": true - })) -} - impl TaskSpawner { // Download task spawners for podcast episodes and YouTube videos pub async fn spawn_download_podcast_episode(&self, episode_id: i32, user_id: i32) -> AppResult { @@ -446,6 +363,15 @@ impl TaskSpawner { if !download_dir.exists() { std::fs::create_dir_all(&download_dir) .map_err(|e| crate::error::AppError::internal(&format!("Failed to create download directory: {}", e)))?; + + // Set ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set directory ownership (ignore errors for NFS mounts) + let _ = std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), download_dir.to_string_lossy().to_string()]) + .output(); } // Format date for filename (like Python version) @@ -496,8 +422,8 @@ impl TaskSpawner { if total_size > 0 { let progress = 25.0 + (downloaded as f64 / total_size as f64) * 65.0; // 25% to 90% - // Only send WebSocket updates every 5% to avoid overwhelming the browser - if progress - last_reported_progress >= 5.0 || downloaded == total_size { + // Only send WebSocket updates every 10% to avoid overwhelming the browser + if progress - last_reported_progress >= 10.0 || 
downloaded == total_size { let status_message = format!("Downloading {}", episode_title); task_manager.update_task_progress_with_details( &task_id_clone, @@ -517,6 +443,15 @@ impl TaskSpawner { drop(file); // Close the file handle before metadata operations + // Set file ownership using PUID/PGID environment variables + let puid: u32 = std::env::var("PUID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + let pgid: u32 = std::env::var("PGID").unwrap_or_else(|_| "1000".to_string()).parse().unwrap_or(1000); + + // Set file ownership (ignore errors for NFS mounts) + let _ = std::process::Command::new("chown") + .args(&[format!("{}:{}", puid, pgid), file_path.to_string_lossy().to_string()]) + .output(); + let status_message = format!("Processing {}", episode_title); task_manager.update_task_progress_with_details(&task_id_clone, 85.0, Some(status_message), Some(episode_id), Some("podcast_download".to_string()), Some(episode_title.clone())).await?; @@ -1027,4 +962,80 @@ async fn download_artwork(url: &str) -> Result, Box, + password: Option, + ) -> AppResult { + let task_type = "add_podcast_episodes".to_string(); + + self.spawn_task( + task_type, + user_id, + move |task_id, task_manager, db_pool| { + Box::pin(async move { + println!("Starting episode processing for podcast {} (user {})", podcast_id, user_id); + + // Update progress - starting + task_manager.update_task_progress(&task_id, 10.0, Some("Fetching podcast feed...".to_string())).await?; + + // Add episodes to the existing podcast + match db_pool.add_episodes( + podcast_id, + &feed_url, + &artwork_url, + false, // auto_download + username.as_deref(), + password.as_deref(), + ).await { + Ok(first_episode_id) => { + // Update progress - fetching count + task_manager.update_task_progress(&task_id, 80.0, Some("Counting episodes...".to_string())).await?; + + // Count episodes for logging and notification + let episode_count: i64 = match &db_pool { + crate::database::DatabasePool::Postgres(pool) => { 
+ sqlx::query_scalar(r#"SELECT COUNT(*) FROM "Episodes" WHERE podcastid = $1"#) + .bind(podcast_id) + .fetch_one(pool) + .await? + } + crate::database::DatabasePool::MySQL(pool) => { + sqlx::query_scalar("SELECT COUNT(*) FROM Episodes WHERE PodcastID = ?") + .bind(podcast_id) + .fetch_one(pool) + .await? + } + }; + + // Final progress update + task_manager.update_task_progress(&task_id, 100.0, Some(format!("Added {} episodes", episode_count))).await?; + + println!("✅ Added {} episodes for podcast {} (user {})", episode_count, podcast_id, user_id); + + Ok(serde_json::json!({ + "podcast_id": podcast_id, + "user_id": user_id, + "episode_count": episode_count, + "first_episode_id": first_episode_id, + "status": "completed" + })) + } + Err(e) => { + println!("Failed to add episodes for podcast {}: {}", podcast_id, e); + task_manager.update_task_progress(&task_id, 0.0, Some(format!("Failed to add episodes: {}", e))).await?; + Err(e) + } + } + }) + }, + ).await + } } \ No newline at end of file diff --git a/startup/nginx.conf b/startup/nginx.conf index 51d6c5e6..0af64d5a 100644 --- a/startup/nginx.conf +++ b/startup/nginx.conf @@ -113,9 +113,9 @@ http { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; - # Increase timeouts for potentially longer operations - proxy_read_timeout 300; - proxy_send_timeout 300; + # Increase timeouts for potentially longer operations (30 minutes for large syncs) + proxy_read_timeout 1800; + proxy_send_timeout 1800; if ($request_method = 'OPTIONS') { add_header 'Access-Control-Allow-Origin' '*' always; diff --git a/startup/services/gpodder-api.toml b/startup/services/gpodder-api.toml new file mode 100644 index 00000000..93d92c2d --- /dev/null +++ b/startup/services/gpodder-api.toml @@ -0,0 +1,17 @@ +command = "/usr/local/bin/gpodder-api" +start-after = ["pinepods-api.toml"] +stdout = "${HORUST_STDOUT_MODE}" +stderr = "${HORUST_STDERR_MODE}" + +[restart] +strategy = "always" +backoff = "1s" 
+attempts = 0 + +[environment] +keep-env = true +additional = { DB_USER = "${DB_USER}", DB_HOST = "${DB_HOST}", DB_PORT = "${DB_PORT}", DB_NAME = "${DB_NAME}", DB_PASSWORD = "${DB_PASSWORD}", SERVER_PORT = "8042" } + +[termination] +signal = "TERM" +wait = "10s" \ No newline at end of file diff --git a/startup/services/nginx.toml b/startup/services/nginx.toml new file mode 100644 index 00000000..9733de20 --- /dev/null +++ b/startup/services/nginx.toml @@ -0,0 +1,16 @@ +command = "nginx -g 'daemon off;'" +start-after = ["gpodder-api.toml"] +stdout = "${HORUST_STDOUT_MODE}" +stderr = "${HORUST_STDERR_MODE}" + +[restart] +strategy = "always" +backoff = "1s" +attempts = 0 + +[environment] +keep-env = true + +[termination] +signal = "TERM" +wait = "5s" \ No newline at end of file diff --git a/startup/services/pinepods-api.toml b/startup/services/pinepods-api.toml new file mode 100644 index 00000000..2428aa56 --- /dev/null +++ b/startup/services/pinepods-api.toml @@ -0,0 +1,16 @@ +command = "/usr/local/bin/pinepods-api" +stdout = "${HORUST_STDOUT_MODE}" +stderr = "${HORUST_STDERR_MODE}" + +[restart] +strategy = "always" +backoff = "1s" +attempts = 0 + +[environment] +keep-env = true +additional = { DB_USER = "${DB_USER}", DB_PASSWORD = "${DB_PASSWORD}", DB_HOST = "${DB_HOST}", DB_NAME = "${DB_NAME}", DB_PORT = "${DB_PORT}", DB_TYPE = "${DB_TYPE}", FULLNAME = "${FULLNAME}", USERNAME = "${USERNAME}", EMAIL = "${EMAIL}", PASSWORD = "${PASSWORD}", REVERSE_PROXY = "${REVERSE_PROXY}", SEARCH_API_URL = "${SEARCH_API_URL}", PEOPLE_API_URL = "${PEOPLE_API_URL}", PINEPODS_PORT = "${PINEPODS_PORT}", PROXY_PROTOCOL = "${PROXY_PROTOCOL}", DEBUG_MODE = "${DEBUG_MODE}", VALKEY_HOST = "${VALKEY_HOST}", VALKEY_PORT = "${VALKEY_PORT}" } + +[termination] +signal = "TERM" +wait = "5s" \ No newline at end of file diff --git a/startup/setup_database_new.py b/startup/setup_database_new.py index 09abd757..0b42d6b9 100755 --- a/startup/setup_database_new.py +++ b/startup/setup_database_new.py @@ 
-71,7 +71,10 @@ def wait_for_postgresql_ready(): def wait_for_mysql_ready(): """Wait for MySQL/MariaDB to be ready to accept connections""" import time - import mysql.connector + try: + import mariadb as mysql_connector + except ImportError: + import mysql.connector db_host = os.environ.get("DB_HOST", "127.0.0.1") db_port = int(os.environ.get("DB_PORT", "3306")) @@ -85,13 +88,14 @@ def wait_for_mysql_ready(): while attempt <= max_attempts: try: - # Try to connect to MySQL - conn = mysql.connector.connect( + # Try to connect to MySQL/MariaDB + conn = mysql_connector.connect( host=db_host, port=db_port, user=db_user, password=db_password, - connection_timeout=3 + connect_timeout=3, + autocommit=True ) cursor = conn.cursor() # Test if MySQL is ready to accept queries diff --git a/startup/startup.sh b/startup/startup.sh index 88562f6e..61fc2fbe 100755 --- a/startup/startup.sh +++ b/startup/startup.sh @@ -4,7 +4,7 @@ set -e # Exit immediately if a command exits with a non-zero status # Function to handle shutdown shutdown() { echo "Shutting down..." 
- supervisorctl stop all + pkill -TERM horust exit 0 } @@ -30,6 +30,31 @@ export PROXY_PROTOCOL=$PROXY_PROTOCOL export DEBUG_MODE=${DEBUG_MODE:-'False'} export VALKEY_HOST=${VALKEY_HOST:-'valkey'} export VALKEY_PORT=${VALKEY_PORT:-'6379'} +export DEFAULT_LANGUAGE=${DEFAULT_LANGUAGE:-'en'} + +# Save user's HOSTNAME to SERVER_URL before Docker overwrites it with container ID +# This preserves the user-configured server URL for RSS feed generation +export SERVER_URL=${HOSTNAME} + +# Export OIDC environment variables +export OIDC_DISABLE_STANDARD_LOGIN=${OIDC_DISABLE_STANDARD_LOGIN:-'false'} +export OIDC_PROVIDER_NAME=${OIDC_PROVIDER_NAME} +export OIDC_CLIENT_ID=${OIDC_CLIENT_ID} +export OIDC_CLIENT_SECRET=${OIDC_CLIENT_SECRET} +export OIDC_AUTHORIZATION_URL=${OIDC_AUTHORIZATION_URL} +export OIDC_TOKEN_URL=${OIDC_TOKEN_URL} +export OIDC_USER_INFO_URL=${OIDC_USER_INFO_URL} +export OIDC_BUTTON_TEXT=${OIDC_BUTTON_TEXT} +export OIDC_SCOPE=${OIDC_SCOPE} +export OIDC_BUTTON_COLOR=${OIDC_BUTTON_COLOR} +export OIDC_BUTTON_TEXT_COLOR=${OIDC_BUTTON_TEXT_COLOR} +export OIDC_ICON_SVG=${OIDC_ICON_SVG} +export OIDC_NAME_CLAIM=${OIDC_NAME_CLAIM} +export OIDC_EMAIL_CLAIM=${OIDC_EMAIL_CLAIM} +export OIDC_USERNAME_CLAIM=${OIDC_USERNAME_CLAIM} +export OIDC_ROLES_CLAIM=${OIDC_ROLES_CLAIM} +export OIDC_USER_ROLE=${OIDC_USER_ROLE} +export OIDC_ADMIN_ROLE=${OIDC_ADMIN_ROLE} # Print admin info if default admin is used if [[ $FULLNAME == 'Pinepods Admin' ]]; then @@ -100,7 +125,7 @@ mkdir -p /pinepods/cache mkdir -p /opt/pinepods/backups mkdir -p /opt/pinepods/downloads mkdir -p /opt/pinepods/certs -mkdir -p /var/log/supervisor # Make sure supervisor log directory exists +mkdir -p /var/log/pinepods # Make sure log directory exists # Database Setup echo "Using $DB_TYPE database" @@ -123,24 +148,32 @@ else echo "Skipping exim setup as user/group doesn't exist on this system" fi -# Start all services with supervisord -echo "Starting supervisord..." 
+# Set up environment variables for Horust logging modes if [[ $DEBUG_MODE == "true" ]]; then - supervisord -c /pinepods/startup/supervisordebug.conf + export HORUST_STDOUT_MODE="STDOUT" + export HORUST_STDERR_MODE="STDERR" + echo "Starting Horust in debug mode (logs to stdout)..." else - supervisord -c /pinepods/startup/supervisord.conf + export HORUST_STDOUT_MODE="/var/log/pinepods/service.log" + export HORUST_STDERR_MODE="/var/log/pinepods/service.log" + echo "Starting Horust in production mode (logs to files)..." fi -# Set permissions for download and backup directories +# Set permissions for download and backup directories BEFORE starting services # Only do this if PUID and PGID are set if [[ -n "$PUID" && -n "$PGID" ]]; then - echo "Setting permissions for download and backup directories..." + echo "Setting permissions for download and backup directories...(Be patient this might take a while if you have a lot of downloads)" chown -R ${PUID}:${PGID} /opt/pinepods/downloads chown -R ${PUID}:${PGID} /opt/pinepods/backups else echo "Skipping permission setting as PUID/PGID are not set" fi -# Keep container running -echo "PinePods startup complete, running supervisord in foreground..." -exec supervisorctl tail -f all +# Copy service configurations to Horust directory +cp /pinepods/startup/services/*.toml /etc/horust/services/ + +# Start all services with Horust +echo "Starting services with Horust..." +echo "PinePods startup complete, running Horust in foreground..." 
+exec horust --services-path /etc/horust/services/ + diff --git a/startup/supervisordebug.conf b/startup/supervisordebug.conf index fdcbf1b9..e8094f75 100644 --- a/startup/supervisordebug.conf +++ b/startup/supervisordebug.conf @@ -14,34 +14,6 @@ redirect_stderr=true stdout_logfile=/dev/stdout stdout_logfile_maxbytes=0 -[program:startup_tasks] -command=/pinepods/startup/app_startup.sh -autostart=true -autorestart=false -redirect_stderr=true -startsecs=10 -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 - -[program:nightly_tasks] -command=/pinepods/startup/call_nightly_tasks.sh -autostart=true -autorestart=false -redirect_stderr=true -startsecs=10 -stopwaitsecs=20 -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 - -[program:podcast_refresh] -command=/pinepods/startup/call_refresh_endpoint.sh -autostart=true -autorestart=false -redirect_stderr=true -startsecs=10 -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 - [program:client_api] command=/usr/local/bin/pinepods-api redirect_stderr=true diff --git a/validate_db.py b/validate_db.py new file mode 100644 index 00000000..7067d8f2 --- /dev/null +++ b/validate_db.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python3 +""" +Simple wrapper for database validation that reads from environment variables +""" + +import os +import sys +import subprocess +import psycopg +def main(): + # Get database config from environment variables (same as the app uses) + db_type = os.environ.get('DB_TYPE', 'postgresql') + db_host = os.environ.get('DB_HOST', 'localhost') + db_port = os.environ.get('DB_PORT', '5432' if db_type == 'postgresql' else '3306') + db_user = os.environ.get('DB_USER', 'postgres' if db_type == 'postgresql' else 'root') + db_password = os.environ.get('DB_PASSWORD', '') + db_name = os.environ.get('DB_NAME', 'pinepods_database') + + if not db_password: + print("Error: DB_PASSWORD environment variable is required") + sys.exit(1) + + # Build command + cmd = [ + sys.executable, + 
'database_functions/validate_database.py', + '--db-type', db_type, + '--db-host', db_host, + '--db-port', db_port, + '--db-user', db_user, + '--db-password', db_password, + '--db-name', db_name + ] + + # Add verbose flag if requested + if '--verbose' in sys.argv or '-v' in sys.argv: + cmd.append('--verbose') + + print(f"Validating {db_type} database: {db_user}@{db_host}:{db_port}/{db_name}") + print("Running database validation...") + print() + + # Run the validator + result = subprocess.run(cmd) + sys.exit(result.returncode) + +if __name__ == '__main__': + main() diff --git a/web/.cargo/config.toml b/web/.cargo/config.toml new file mode 100644 index 00000000..2981b36b --- /dev/null +++ b/web/.cargo/config.toml @@ -0,0 +1,2 @@ +[target.wasm32-unknown-unknown] +rustflags = ["--cfg=web_sys_unstable_apis", "--cfg=getrandom_backend=\"wasm_js\""] \ No newline at end of file diff --git a/web/Cargo.lock b/web/Cargo.lock index c3dc448a..2bb4305b 100644 --- a/web/Cargo.lock +++ b/web/Cargo.lock @@ -4,18 +4,18 @@ version = 4 [[package]] name = "addr2line" -version = "0.24.1" +version = "0.25.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5fb1d8e4442bd405fdfd1dacb42792696b0cf9cb15882e5d097b742a676d375" +checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" dependencies = [ "gimli", ] [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" @@ -28,9 +28,9 @@ dependencies = [ [[package]] name = "ammonia" -version = "4.1.0" +version = "4.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ada2ee439075a3e70b6992fce18ac4e407cd05aea9ca3f75d2c0b0c20bbb364" +checksum = 
"17e913097e1a2124b46746c980134e8c954bc17a6a59bb3fde96f088d126dde6" dependencies = [ "cssparser", "html5ever", @@ -39,12 +39,6 @@ dependencies = [ "url", ] -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -56,9 +50,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.98" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "anymap" @@ -97,9 +91,9 @@ dependencies = [ [[package]] name = "async-channel" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b47800b0be77592da0afd425cc03468052844aff33b84e33cc696f64e77b6a" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" dependencies = [ "concurrent-queue", "event-listener-strategy", @@ -109,14 +103,15 @@ dependencies = [ [[package]] name = "async-executor" -version = "1.13.1" +version = "1.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" +checksum = "497c00e0fd83a72a79a39fcbd8e3e2f055d6f6c7e025f3b3d91f4f8e76527fb8" dependencies = [ "async-task", "concurrent-queue", "fastrand", "futures-lite", + "pin-project-lite", "slab", ] @@ -126,7 +121,7 @@ version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-executor", "async-io", "async-lock", @@ -137,11 +132,11 @@ dependencies = [ [[package]] name = "async-io" -version = 
"2.3.4" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444b0228950ee6501b3568d3c93bf1176a1fdbc3b758dcd9475046d30f4dc7e8" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" dependencies = [ - "async-lock", + "autocfg", "cfg-if", "concurrent-queue", "futures-io", @@ -150,26 +145,25 @@ dependencies = [ "polling", "rustix", "slab", - "tracing", - "windows-sys 0.59.0", + "windows-sys 0.61.1", ] [[package]] name = "async-lock" -version = "3.4.0" +version = "3.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff6e472cdea888a4bd64f342f09b3f50e1886d32afe8df3d663c01140b811b18" +checksum = "5fd03604047cee9b6ce9de9f70c6cd540a0520c813cbd49bae61f33ab80ed1dc" dependencies = [ - "event-listener 5.3.1", + "event-listener 5.4.1", "event-listener-strategy", "pin-project-lite", ] [[package]] name = "async-std" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "730294c1c08c2e0f85759590518f6333f0d5a0a766a27d519c1b244c3dfd8a24" +checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" dependencies = [ "async-channel 1.9.0", "async-global-executor", @@ -199,15 +193,15 @@ checksum = "8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "atom_syndication" -version = "0.12.4" +version = "0.12.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a3a5ed3201df5658d1aa45060c5a57dc9dba8a8ada20d696d67cb0c479ee043" +checksum = "d2f68d23e2cb4fd958c705b91a6b4c80ceeaf27a9e11651272a8389d5ce1a4a3" dependencies = [ "chrono", "derive_builder", "diligent-date-parser", "never", - "quick-xml 0.36.2", + "quick-xml", ] [[package]] @@ -218,15 +212,15 @@ checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.3.0" +version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "backtrace" -version = "0.3.74" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" +checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" dependencies = [ "addr2line", "cfg-if", @@ -234,7 +228,7 @@ dependencies = [ "miniz_oxide", "object", "rustc-demangle", - "windows-targets", + "windows-link", ] [[package]] @@ -245,9 +239,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba" [[package]] name = "bincode" @@ -260,9 +254,9 @@ dependencies = [ [[package]] name = "bitflags" -version = "2.6.0" +version = "2.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" [[package]] name = "blake2" @@ -284,11 +278,11 @@ dependencies = [ [[package]] name = "blocking" -version = "1.6.1" +version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "703f41c54fc768e63e091340b424302bb1c29ef4aa0c7f10fe849dfb114d29ea" +checksum = "e83f8d02be6967315521be875afa792a316e28d57b5a2d401897e2a7921b7f21" dependencies = [ - "async-channel 2.3.1", + "async-channel 2.5.0", "async-task", "futures-io", "futures-lite", @@ -303,36 +297,31 @@ checksum = "cfa8873f51c92e232f9bac4065cddef41b714152812bfc5f7672ba16d6ef8cd9" 
[[package]] name = "bumpalo" -version = "3.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" - -[[package]] -name = "byteorder" -version = "1.5.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytes" -version = "1.7.2" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "428d9aa8fbc0670b7b8d6030a7fadd0f86151cae55e4dbbece15f3780a3dfaf3" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.1.21" +version = "1.2.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07b1695e2c7e8fc85310cde85aeaab7e3097f593c91d209d3f9df76c928100f0" +checksum = "e1354349954c6fc9cb0deab020f27f783cf0b604e8bb754dc4658ecf0d29c35f" dependencies = [ + "find-msvc-tools", "shlex", ] [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "cfg-match" @@ -342,11 +331,10 @@ checksum = "8100e46ff92eb85bf6dc2930c73f2a4f7176393c84a9446b3d501e1b354e7b34" [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", "num-traits", @@ -357,23 +345,12 @@ dependencies = [ [[package]] name = "chrono-tz" -version = "0.10.3" 
+version = "0.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efdce149c370f133a071ca8ef6ea340b7b88748ab0810097a9e2976eaa34b4f3" +checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3" dependencies = [ "chrono", - "chrono-tz-build", - "phf", -] - -[[package]] -name = "chrono-tz-build" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94fea34d77a245229e7746bd2beb786cd2a896f306ff491fb8cecb3074b10a7" -dependencies = [ - "parse-zoneinfo", - "phf_codegen", + "phf 0.12.1", ] [[package]] @@ -403,18 +380,18 @@ checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" -version = "0.2.14" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crossbeam-utils" -version = "0.8.20" +version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" [[package]] name = "crypto-common" @@ -435,7 +412,7 @@ dependencies = [ "cssparser-macros", "dtoa-short", "itoa", - "phf", + "phf 0.11.3", "smallvec", ] @@ -446,42 +423,77 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.90", + "syn 2.0.106", +] + +[[package]] +name = "darling" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" +dependencies = [ + "darling_core 0.20.11", + "darling_macro 0.20.11", ] [[package]] name = "darling" 
-version = "0.20.10" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" dependencies = [ - "darling_core", - "darling_macro", + "darling_core 0.21.3", + "darling_macro 0.21.3", ] [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.90", + "syn 2.0.106", +] + +[[package]] +name = "darling_core" +version = "0.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.106", +] + +[[package]] +name = "darling_macro" +version = "0.20.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" +dependencies = [ + "darling_core 0.20.11", + "quote", + "syn 2.0.106", ] [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ - "darling_core", + "darling_core 0.21.3", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -492,43 +504,43 @@ checksum = "2a2330da5de22e8a3cb63252ce2abb30116bf5265e89c0e01bc17015ce30a476" [[package]] name = "deranged" -version = "0.3.11" +version = "0.5.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] name = "derive_builder" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd33f37ee6a119146a1781d3356a7c26028f83d779b2e04ecd45fdc75c76877b" +checksum = "507dfb09ea8b7fa618fcf76e953f4f5e192547945816d5358edffe39f6f94947" dependencies = [ "derive_builder_macro", ] [[package]] name = "derive_builder_core" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7431fa049613920234f22c47fdc33e6cf3ee83067091ea4277a3f8c4587aae38" +checksum = "2d5bcf7b024d6835cfb3d473887cd966994907effbe9227e8c8219824d06c4e8" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "derive_builder_macro" -version = "0.20.1" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4abae7035bf79b9877b779505d8cf3749285b80c43941eda66604841889451dc" +checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -544,9 +556,9 @@ dependencies = [ [[package]] name = "diligent-date-parser" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6cf7fe294274a222363f84bcb63cdea762979a0443b4cf1f4f8fd17c86b1182" +checksum = "c8ede7d79366f419921e2e2f67889c12125726692a313bffb474bd5f37a581e9" dependencies = [ "chrono", ] @@ -559,7 +571,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -579,33 +591,33 @@ 
dependencies = [ [[package]] name = "dyn-clone" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c7a8fb8a9fbf66c1f703fe16184d10ca0ee9d23be5b4436400408ba54a95005" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "encoding_rs" -version = "0.8.34" +version = "0.8.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" dependencies = [ "cfg-if", ] [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.61.1", ] [[package]] @@ -616,9 +628,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "event-listener" -version = "5.3.1" +version = "5.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6032be9bd27023a771701cc49f9f053c751055f71efb2e0ae5c15809093675ba" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" dependencies = [ "concurrent-queue", "parking", @@ -627,19 +639,25 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" dependencies = [ - "event-listener 5.3.1", + "event-listener 5.4.1", "pin-project-lite", ] [[package]] name = "fastrand" -version = "2.1.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "find-msvc-tools" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" [[package]] name = "fnv" @@ -649,9 +667,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -716,9 +734,9 @@ checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" -version = "2.3.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" dependencies = [ "fastrand", "futures-core", @@ -735,7 +753,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -780,45 +798,45 @@ dependencies = [ [[package]] name = "getopts" -version = "0.2.21" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "14dbbfd5c71d70241ecf9e6f13737f7b5ce823821063188d7e46c41d371eebd5" +checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" dependencies = [ "unicode-width", ] [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "js-sys", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" +checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd" dependencies = [ "cfg-if", "js-sys", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasip2", "wasm-bindgen", ] [[package]] name = "gimli" -version = "0.31.0" +version = "0.32.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32085ea23f3234fc7846555e85283ba4de91e21016dc0455a16286d87a292d64" +checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" [[package]] name = "gloo" @@ -1023,7 +1041,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "903f432be5ba34427eac5e16048ef65604a82061fe93789f2212afc73d8617d6" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "gloo-events 0.2.0", "gloo-utils 0.2.0", "serde", @@ -1107,7 +1125,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils 0.2.0", - "http 1.1.0", + "http 1.3.1", "js-sys", "pin-project", "serde", @@ -1280,7 +1298,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1291,21 +1309,15 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" 
[[package]] name = "hashbrown" -version = "0.14.5" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" [[package]] name = "hermit-abi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" - -[[package]] -name = "hermit-abi" -version = "0.4.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbf6a919d6cf397374f7dfeeea91d974c7c0a7221d0d0f4f20d859d329e53fcc" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" [[package]] name = "hex" @@ -1315,12 +1327,11 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "html5ever" -version = "0.31.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953cbbe631aae7fc0a112702ad5d3aaf09da38beaf45ea84610d6e1c358f569c" +checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4" dependencies = [ "log", - "mac", "markup5ever", "match_token", ] @@ -1349,25 +1360,36 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", "itoa", ] +[[package]] +name = "i18nrs" +version = "0.1.7" +source = "git+https://github.com/madeofpendletonwool/i18n-rs#c0e755729b3c8ff220d8417fb126972fa0665a46" +dependencies = [ + "serde_json", + "web-sys", + "yew 0.21.0", +] + [[package]] name = "iana-time-zone" -version = "0.1.61" +version = "0.1.64" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", "windows-core", ] @@ -1383,21 +1405,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -1406,31 +1429,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = 
"436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -1438,67 +1441,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -1507,9 +1497,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1518,9 +1508,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -1533,17 +1523,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8a9aa791c7b5a71b636b7a68207fdebf171ddfc593d9c8506ec4cbc527b6a84" dependencies = [ "implicit-clone-derive", - "indexmap 2.5.0", + "indexmap 2.11.4", ] [[package]] name = "implicit-clone-derive" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9311685eb9a34808bbb0608ad2fcab9ae216266beca5848613e95553ac914e3b" +checksum = "699c1b6d335e63d0ba5c1e1c7f647371ce989c3bcbe1f7ed2b85fa56e3bd1a21" dependencies = [ "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1559,26 +1549,38 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - 
"hashbrown 0.14.5", + "hashbrown 0.16.0", "serde", + "serde_core", +] + +[[package]] +name = "io-uring" +version = "0.7.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" +dependencies = [ + "bitflags", + "cfg-if", + "libc", ] [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "b011eec8cc36da2aab2d5cff675ec18454fad408585853910a202391cf9f8e65" dependencies = [ "once_cell", "wasm-bindgen", @@ -1601,27 +1603,27 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "libc" -version = "0.2.158" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "linux-raw-sys" -version = "0.4.14" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "lock_api" -version = "0.4.12" +version = 
"0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", @@ -1629,9 +1631,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" dependencies = [ "value-bag", ] @@ -1650,9 +1652,9 @@ checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d" [[package]] name = "markup5ever" -version = "0.16.1" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0a8096766c229e8c88a3900c9b44b7e06aa7f7343cc229158c3e58ef8f9973a" +checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3" dependencies = [ "log", "tendril", @@ -1661,13 +1663,13 @@ dependencies = [ [[package]] name = "match_token" -version = "0.1.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" +checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1678,19 +1680,30 @@ checksum = "ae960838283323069879657ca3de837e9f7bbb4c7bf6ea7f1b290d5e9476d2e0" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", ] +[[package]] +name = "mio" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" +dependencies = [ + "libc", + "wasi", + "windows-sys 0.59.0", +] + [[package]] name = "never" version = "0.1.0" @@ -1720,28 +1733,28 @@ dependencies = [ [[package]] name = "num_cpus" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi", "libc", ] [[package]] name = "object" -version = "0.36.4" +version = "0.37.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a" +checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "parking" @@ -1751,9 +1764,9 @@ checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = 
[ "lock_api", "parking_lot_core", @@ -1761,9 +1774,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", @@ -1772,15 +1785,6 @@ dependencies = [ "windows-targets", ] -[[package]] -name = "parse-zoneinfo" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f2a05b18d44e2957b88f96ba460715e295bc1d7510468a2f3d3b44535d26c24" -dependencies = [ - "regex", -] - [[package]] name = "password-hash" version = "0.5.0" @@ -1794,37 +1798,46 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ "phf_macros", - "phf_shared", + "phf_shared 0.11.3", +] + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_shared 0.12.1", ] [[package]] name = "phf_codegen" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8d39688d359e6b34654d328e262234662d16cc0f60ec8dcbe5e718709342a5a" +checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" 
dependencies = [ "phf_generator", - "phf_shared", + "phf_shared 0.11.3", ] [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared", + "phf_shared 0.11.3", "rand 0.8.5", ] @@ -1835,46 +1848,55 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ "phf_generator", - "phf_shared", + "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" +dependencies = [ + "siphasher", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" dependencies = [ "siphasher", ] [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 
2.0.90", + "syn 2.0.106", ] [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -1906,17 +1928,25 @@ dependencies = [ [[package]] name = "polling" -version = "3.7.3" +version = "3.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2790cd301dec6cd3b7a025e4815cf825724a51c98dccfe6a3e55f05ffb6511" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" dependencies = [ "cfg-if", "concurrent-queue", - "hermit-abi 0.4.0", + "hermit-abi", "pin-project-lite", "rustix", - "tracing", - "windows-sys 0.59.0", + "windows-sys 0.61.1", +] + +[[package]] +name = "potential_utf" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" +dependencies = [ + "zerovec", ] [[package]] @@ -1927,9 +1957,9 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] @@ -1942,12 +1972,12 @@ checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" [[package]] name = "prettyplease" -version = "0.2.22" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479cf940fbbb3426c32c5d5176f62ad57549a0bb84773423ba8be9d089f5faba" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.90", + "syn 
2.0.106", ] [[package]] @@ -1986,9 +2016,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -2031,19 +2061,9 @@ checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" [[package]] name = "quick-xml" -version = "0.36.2" +version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7649a7b4df05aed9ea7ec6f628c67c9953a43869b8bc50929569b2999d443fe" -dependencies = [ - "encoding_rs", - "memchr", -] - -[[package]] -name = "quick-xml" -version = "0.37.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f22f29bdff3987b4d8632ef95fd6424ec7e4e0a57e2f4fc63e489e75357f6a03" +checksum = "331e97a1af0bf59823e6eadffe373d7b27f485be8748f71471c662c1f269b7fb" dependencies = [ "encoding_rs", "memchr", @@ -2051,18 +2071,18 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.37" +version = "1.0.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "ce25767e7b499d1b604768e7cde645d14cc8584231ea6b295e9c9eb22c02e1d1" dependencies = [ "proc-macro2", ] [[package]] name = "r-efi" -version = "5.2.0" +version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" +checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" [[package]] name = "rand" @@ -2075,9 +2095,9 @@ dependencies = [ [[package]] name = "rand" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9fbfd9d094a40bf3ae768db9361049ace4c0e04a4fd6b359518bd7b73a73dd97" +checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1" dependencies = [ "rand_chacha", "rand_core 0.9.3", @@ -2099,7 +2119,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -2108,43 +2128,43 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38" dependencies = [ - "getrandom 0.3.3", + "getrandom 0.3.4", ] [[package]] name = "redox_syscall" -version = "0.5.4" +version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0884ad60e090bf1345b93da0a5de8923c93884cd03f40dfcfddd3b4bee661853" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ "bitflags", ] [[package]] name = "ref-cast" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" dependencies = [ "ref-cast-impl", ] [[package]] name = "ref-cast-impl" -version = "1.0.24" +version = "1.0.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.12.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = 
"843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" dependencies = [ "aho-corasick", "memchr", @@ -2154,9 +2174,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" dependencies = [ "aho-corasick", "memchr", @@ -2165,9 +2185,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" [[package]] name = "route-recognizer" @@ -2184,39 +2204,39 @@ dependencies = [ "atom_syndication", "derive_builder", "never", - "quick-xml 0.37.1", + "quick-xml", ] [[package]] name = "rustc-demangle" -version = "0.1.24" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" +checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace" [[package]] name = "rustix" -version = "0.38.37" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", - "windows-sys 0.52.0", + "windows-sys 0.61.1", ] [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" [[package]] 
name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "schemars" @@ -2232,9 +2252,9 @@ dependencies = [ [[package]] name = "schemars" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1375ba8ef45a6f15d83fa8748f1079428295d403d6ea991d09ab100155fbc06d" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ "dyn-clone", "ref-cast", @@ -2250,10 +2270,11 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] @@ -2279,27 +2300,37 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -2316,19 +2347,18 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.14.0" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" +checksum = "aa66c845eee442168b2c8134fec70ac50dc20e760769c8ba0ad1319ca1959b04" dependencies = [ "base64", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.5.0", + "indexmap 2.11.4", "schemars 0.9.0", - "schemars 1.0.3", - "serde", - "serde_derive", + "schemars 1.0.4", + "serde_core", "serde_json", "serde_with_macros", "time", @@ -2336,14 +2366,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.14.0" +version = "3.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" +checksum = "b91a903660542fced4e99881aa481bdbaec1634568ee02e0b8bd57c64cb38955" dependencies = [ - "darling", + "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2354,24 +2384,21 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "siphasher" -version = "0.3.11" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" 
[[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "stable_deref_trait" @@ -2387,7 +2414,7 @@ checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot", - "phf_shared", + "phf_shared 0.11.3", "precomputed-hash", "serde", ] @@ -2399,7 +2426,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ "phf_generator", - "phf_shared", + "phf_shared 0.11.3", "proc-macro2", "quote", ] @@ -2429,9 +2456,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.90" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -2440,13 +2467,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2462,29 +2489,29 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ "thiserror-impl", ] 
[[package]] name = "thiserror-impl" -version = "1.0.63" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "time" -version = "0.3.36" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -2497,15 +2524,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.18" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -2513,9 +2540,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -2523,19 +2550,23 @@ dependencies = [ [[package]] name = "tokio" -version = "1.40.0" +version = "1.47.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" 
+checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038" dependencies = [ "backtrace", + "io-uring", + "libc", + "mio", "pin-project-lite", + "slab", ] [[package]] name = "tokio-stream" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -2544,9 +2575,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" [[package]] name = "toml_edit" @@ -2554,16 +2585,16 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.11.4", "toml_datetime", "winnow", ] [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -2572,60 +2603,58 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "tracing-core" -version = "0.1.32" +version 
= "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", ] [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.13" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e91b56cd4cadaeb79bbf1a5645f6b4f8dc5bde8834ad5894a8db35fda9efa1fe" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" [[package]] name = "unicode-width" -version = "0.1.14" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" +checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] [[package]] @@ -2640,12 +2669,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -2654,9 +2677,9 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "value-bag" -version = "1.9.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" +checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" [[package]] name = "version_check" @@ -2666,24 +2689,24 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "da95793dfc411fbbd93f5be7715b0578ec61fe87cb1a42b12eb625caa5c5ea60" dependencies = [ "cfg-if", "once_cell", @@ -2691,27 +2714,14 @@ dependencies = [ "serde", "serde_json", "wasm-bindgen-macro", -] - 
-[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn 2.0.90", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "551f88106c6d5e7ccc7cd9a16f312dd3b5d36ea8b4954304657d5dfba115d4a0" dependencies = [ "cfg-if", "js-sys", @@ -2722,9 +2732,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "04264334509e04a7bf8690f2384ef5265f05143a4bff3889ab7a3269adab59c2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2732,22 +2742,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "420bc339d9f322e562942d52e115d57e950d12d88983a14c79b86859ee6c7ebc" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn 2.0.90", - "wasm-bindgen-backend", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "76f218a38c84bcb33c25ec7059b07847d465ce0e0a76b995e134a45adcb6af76" dependencies = [ "unicode-ident", ] @@ -2766,7 +2776,7 @@ dependencies = [ "data-encoding", "futures", "futures-util", - "getrandom 0.3.3", 
+ "getrandom 0.3.4", "gloo 0.11.0", "gloo-events 0.2.0", "gloo-file 0.3.0", @@ -2774,12 +2784,13 @@ dependencies = [ "gloo-timers 0.3.0", "gloo-utils 0.2.0", "htmlentity", + "i18nrs", "js-sys", "log", "md5", "percent-encoding", "pulldown-cmark", - "rand 0.9.1", + "rand 0.9.2", "regex", "rss", "serde", @@ -2799,9 +2810,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "3a1f95c0d03a47f4ae1f7a64643a6bb97465d9b740f0fa8f90ea33915c99a9a1" dependencies = [ "js-sys", "wasm-bindgen", @@ -2809,11 +2820,11 @@ dependencies = [ [[package]] name = "web_atoms" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08bcbdcad8fb2e316072ba6bbe09419afdb550285668ac2534f4230a6f2da0ee" +checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414" dependencies = [ - "phf", + "phf 0.11.3", "phf_codegen", "string_cache", "string_cache_codegen", @@ -2821,26 +2832,61 @@ dependencies = [ [[package]] name = "windows-core" -version = "0.52.0" +version = "0.62.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +checksum = "6844ee5416b285084d3d3fffd743b925a6c9385455f64f6d4fa3031c4c2749a9" dependencies = [ - "windows-targets", + "windows-implement", + "windows-interface", + "windows-link", + "windows-result", + "windows-strings", +] + +[[package]] +name = "windows-implement" +version = "0.60.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edb307e42a74fb6de9bf3a02d9712678b22399c87e6fa869d6dfcd8c1b7754e0" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", +] + +[[package]] +name = "windows-interface" +version = "0.59.2" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "c0abd1ddbc6964ac14db11c7213d6532ef34bd9aa042c2e5935f59d7908b46a5" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] name = "windows-link" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dccfd733ce2b1753b03b6d3c65edf020262ea35e20ccdf3e288043e6dd620e3" +checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" [[package]] -name = "windows-sys" -version = "0.52.0" +name = "windows-result" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" dependencies = [ - "windows-targets", + "windows-link", +] + +[[package]] +name = "windows-strings" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" +dependencies = [ + "windows-link", ] [[package]] @@ -2852,6 +2898,15 @@ dependencies = [ "windows-targets", ] +[[package]] +name = "windows-sys" +version = "0.61.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f109e41dd4a3c848907eb83d5a42ea98b3769495597450cf6d153507b166f0f" +dependencies = [ + "windows-link", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -2926,25 +2981,16 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags", -] - -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" +checksum = 
"f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "yew" @@ -2984,7 +3030,7 @@ dependencies = [ "futures", "gloo 0.10.0", "implicit-clone", - "indexmap 2.5.0", + "indexmap 2.11.4", "js-sys", "prokio", "rustversion", @@ -3024,7 +3070,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -3054,7 +3100,7 @@ checksum = "42bfd190a07ca8cfde7cd4c52b3ac463803dc07323db8c34daa697e86365978c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -3080,11 +3126,11 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ac6ccd84a49bbce44610d44eb6686a1266337d0cd3aeadb5564ab76a2819f0" dependencies = [ - "darling", + "darling 0.20.11", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -3102,9 +3148,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" dependencies = [ "serde", "stable_deref_trait", @@ -3114,63 +3160,73 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "synstructure", ] 
[[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ - "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "synstructure", ] +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ + "displaydoc", + "yoke", + "zerofrom", +] + [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" 
dependencies = [ "yoke", "zerofrom", @@ -3179,11 +3235,11 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] diff --git a/web/Cargo.toml b/web/Cargo.toml index 739f54f8..88127668 100644 --- a/web/Cargo.toml +++ b/web/Cargo.toml @@ -2,7 +2,7 @@ name = "web" version = "0.1.0" edition = "2021" -rust-version = "1.86" +rust-version = "1.89" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html @@ -11,7 +11,7 @@ rust-version = "1.86" #yew = { git = "https://github.com/yewstack/yew/", features = ["csr"] } yew = { version = "0.21.0", features = ["csr"] } #yew = { "0.21.0", features = ["csr"] } -web-sys = { version = "0.3.77", features = [ +web-sys = { version = "0.3.82", features = [ "CssStyleDeclaration", "DomTokenList", "HtmlSelectElement", @@ -53,43 +53,44 @@ web-sys = { version = "0.3.77", features = [ "MouseEventInit", "CustomEvent", ] } -log = "0.4.27" -wasm-bindgen = "0.2.100" +log = "0.4.28" +wasm-bindgen = "0.2.105" yew-router = { version = "0.18.0" } -serde = { version = "1.0.219", features = ["derive"] } +serde = { version = "1.0.225", features = ["derive"] } gloo-net = { version = "0.6.0", features = ["websocket"] } gloo = "0.11.0" -anyhow = { version = "1.0.98", features = [] } -wasm-bindgen-futures = "0.4.50" +anyhow = { version = "1.0.99", features = [] } +wasm-bindgen-futures = "0.4.55" gloo-timers = "0.3.0" base64 = "0.22.1" yewdux = "0.11.0" rss = "2.0.12" -chrono = "0.4.41" -serde_json = "1.0.140" +chrono = "0.4.42" +serde_json = "1.0.145" yewtil = "0.4.0" gloo-utils = "0.2.0" gloo-events = "0.2.0" md5 = "0.8.0" -ammonia = "4.1.0" +ammonia = "4.1.1" pulldown-cmark = "0.13.0" 
-async-std = "1.13.1" +async-std = "1.13.2" argon2 = { version = "0.5.3", features = ["std", "password-hash"] } -getrandom = { version = "0.3.3", features = ["wasm_js"] } -rand = "0.9.1" -regex = "1.11.1" -js-sys = "0.3.77" -percent-encoding = "2.3.1" +getrandom = { version = "0.3.4", features = ["wasm_js"] } +rand = "0.9.2" +regex = "1.12.2" +js-sys = "0.3.82" +percent-encoding = "2.3.2" data-encoding = "2.9.0" -url = "2.5.4" +url = "2.5.7" serde-wasm-bindgen = "0.6.5" -chrono-tz = "0.10.3" +chrono-tz = "0.10.4" futures = "0.3.31" futures-util = "0.3.31" gloo-file = "0.3.0" urlencoding = "2.1.3" -serde_with = "3.14.0" +serde_with = "3.15.1" htmlentity = "1.3.2" +i18nrs = { git = "https://github.com/madeofpendletonwool/i18n-rs", features = ["yew"] } [features] default = [] diff --git a/web/build.bat b/web/build.bat new file mode 100644 index 00000000..99153fe1 --- /dev/null +++ b/web/build.bat @@ -0,0 +1,3 @@ +@echo off +set RUSTFLAGS=--cfg=web_sys_unstable_apis --cfg getrandom_backend="wasm_js" +trunk build --features server_build \ No newline at end of file diff --git a/web/build.ps1 b/web/build.ps1 new file mode 100644 index 00000000..c21bf160 --- /dev/null +++ b/web/build.ps1 @@ -0,0 +1,2 @@ +$env:RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg=getrandom_backend=`"wasm_js`"" +trunk build --features server_build \ No newline at end of file diff --git a/web/build.sh b/web/build.sh new file mode 100755 index 00000000..d4c5363f --- /dev/null +++ b/web/build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +cd "$(dirname "$0")" +export RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" +trunk build --features server_build \ No newline at end of file diff --git a/web/src-tauri/Cargo.lock b/web/src-tauri/Cargo.lock index 0141f3dd..4f48d69e 100644 --- a/web/src-tauri/Cargo.lock +++ b/web/src-tauri/Cargo.lock @@ -2,20 +2,11 @@ # It is not intended for manual editing. 
version = 4 -[[package]] -name = "addr2line" -version = "0.24.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" -dependencies = [ - "gimli", -] - [[package]] name = "adler2" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" [[package]] name = "aho-corasick" @@ -58,9 +49,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.94" +version = "1.0.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" +checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" [[package]] name = "app" @@ -101,25 +92,16 @@ dependencies = [ ] [[package]] -name = "autocfg" -version = "1.4.0" +name = "atomic-waker" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" +checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] -name = "backtrace" -version = "0.3.74" +name = "autocfg" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" -dependencies = [ - "addr2line", - "cfg-if", - "libc", - "miniz_oxide", - "object", - "rustc-demangle", - "windows-targets 0.52.6", -] +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "base64" @@ -141,9 +123,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.6.0" +version = "2.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" +checksum = "34efbcccd345379ca2868b2b2c9d3782e9cc58ba87bc7d79d5b53d9c9ae6f25d" dependencies = [ "serde", ] @@ -168,18 +150,18 @@ dependencies = [ [[package]] name = "block2" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d59b4c170e16f0405a2e95aff44432a0d41aa97675f3d52623effe95792a037" +checksum = "340d2f0bdb2a43c1d3cd40513185b2bd7def0aa1052f956455114bc98f82dcf2" dependencies = [ - "objc2 0.6.0", + "objc2 0.6.2", ] [[package]] name = "brotli" -version = "8.0.1" +version = "8.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d" +checksum = "4bd8b9603c7aa97359dbd97ecf258968c95f3adddd6db2f7e7a5bef101c84560" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -198,15 +180,15 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.16.0" +version = "3.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" +checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" [[package]] name = "bytemuck" -version = "1.20.0" +version = "1.23.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b37c88a63ffd85d15b406896cc343916d7cf57838a847b3a6f2ca5d39a5695a" +checksum = "3995eaeebcdf32f91f980d360f78732ddc061097ab4e39991ae7a6ace9194677" [[package]] name = "byteorder" @@ -216,9 +198,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.9.0" +version = "1.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" +checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" dependencies = [ "serde", ] @@ -229,7 +211,7 @@ version = 
"0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ca26ef0159422fb77631dc9d17b102f253b876fe1586b03b803e63a309b4ee2" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "cairo-sys-rs", "glib", "libc", @@ -250,9 +232,9 @@ dependencies = [ [[package]] name = "camino" -version = "1.1.9" +version = "1.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b96ec4966b5813e2c0507c1f86115c8c5abaadc3980879c3424042a02fd1ad3" +checksum = "dd0b03af37dad7a14518b7691d81acb0f8222604ad3d1b02f6b4bed5188c0cd5" dependencies = [ "serde", ] @@ -268,33 +250,33 @@ dependencies = [ [[package]] name = "cargo_metadata" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8769706aad5d996120af43197bf46ef6ad0fda35216b4505f926a365a232d924" +checksum = "dd5eb614ed4c27c5d706420e4320fbe3216ab31fa1c33cd8246ac36dae4479ba" dependencies = [ "camino", "cargo-platform", "semver", "serde", "serde_json", - "thiserror 2.0.7", + "thiserror 2.0.16", ] [[package]] name = "cargo_toml" -version = "0.22.1" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02260d489095346e5cafd04dea8e8cb54d1d74fcd759022a9b72986ebe9a1257" +checksum = "374b7c592d9c00c1f4972ea58390ac6b18cbb6ab79011f3bdc90a0b82ca06b77" dependencies = [ "serde", - "toml", + "toml 0.9.5", ] [[package]] name = "cc" -version = "1.2.4" +version = "1.2.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9157bbaa6b165880c27a4293a474c91cdcf265cc68cc829bf10be0964a391caf" +checksum = "42bc4aea80032b7bf409b0bc7ccad88853858911b7713a8062fdc0623867bedc" dependencies = [ "shlex", ] @@ -328,9 +310,9 @@ dependencies = [ [[package]] name = "cfg-if" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +checksum = 
"2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "cfg_aliases" @@ -340,15 +322,15 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" [[package]] name = "chrono" -version = "0.4.39" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" +checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" dependencies = [ "android-tzdata", "iana-time-zone", "num-traits", "serde", - "windows-targets 0.52.6", + "windows-link 0.1.3", ] [[package]] @@ -379,9 +361,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.10.0" +version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" dependencies = [ "core-foundation-sys", "libc", @@ -399,7 +381,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa95a34622365fa5bbf40b20b75dba8dfa8c94c734aea8ac9a5ca38af14316f1" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "core-foundation", "core-graphics-types", "foreign-types", @@ -412,34 +394,34 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d44a101f213f6c4cdc1853d4b78aef6db6bdfa3468798cc1d9912f4735013eb" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "core-foundation", "libc", ] [[package]] name = "cpufeatures" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280" dependencies = [ "libc", ] [[package]] name = "crc32fast" -version = "1.4.2" +version = "1.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.14" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ba6d68e24814cb8de6bb986db8222d3a027d15872cabc0d18817bc3c0e4471" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" dependencies = [ "crossbeam-utils", ] @@ -484,7 +466,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331" dependencies = [ "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -494,14 +476,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a2785755761f3ddc1492979ce1e48d2c00d09311c39e4466429188f3dd6501" dependencies = [ "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "darling" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" dependencies = [ "darling_core", "darling_macro", @@ -509,40 +491,34 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "darling_macro" -version = "0.20.10" +version = "0.20.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core", "quote", - "syn 2.0.90", + "syn 2.0.106", ] -[[package]] -name = "data-encoding" -version = "2.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" - [[package]] name = "deranged" -version = "0.3.11" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" +checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" dependencies = [ "powerfmt", "serde", @@ -550,15 +526,15 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.18" +version = "0.99.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" +checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" dependencies = [ "convert_case", "proc-macro2", "quote", "rustc_version", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -598,7 +574,7 @@ dependencies = [ "libc", "option-ext", "redox_users", - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -607,6 +583,16 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd0c93bb4b0c6d9b77f4435b0ae98c24d17f1c45b2ff844c6151a07256ca923b" +[[package]] +name = "dispatch2" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89a09f22a6c6069a18470eb92d2298acf25463f14256d24778e1230d789a2aec" +dependencies = [ + "bitflags 2.9.3", + "objc2 0.6.2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -615,14 +601,14 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + 
"syn 2.0.106", ] [[package]] name = "dlopen2" -version = "0.7.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1297103d2bbaea85724fcee6294c2d50b1081f9ad47d0f6f6f61eda65315a6" +checksum = "b54f373ccf864bf587a89e880fb7610f8d73f3045f13580948ccbcaff26febff" dependencies = [ "dlopen2_derive", "libc", @@ -632,29 +618,29 @@ dependencies = [ [[package]] name = "dlopen2_derive" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b99bf03862d7f545ebc28ddd33a665b50865f4dfd84031a393823879bd4c54" +checksum = "788160fb30de9cdd857af31c6a2675904b16ece8fc2737b2c7127ba368c9d0f4" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "dpi" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f25c0e292a7ca6d6498557ff1df68f32c99850012b6ea401cf8daf771f22ff53" +checksum = "d8b14ccef22fc6f5a8f4d7d768562a182c04ce9a3b3157b91390b52ddfdf1a76" dependencies = [ "serde", ] [[package]] name = "dtoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcbb2bf8e87535c23f7a8a321e364ce21462d0ff10cb6407820e8e96dfff6653" +checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04" [[package]] name = "dtoa-short" @@ -673,20 +659,20 @@ checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "dyn-clone" -version = "1.0.17" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" +checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" [[package]] name = "embed-resource" -version = "3.0.2" +version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7fbc6e0d8e0c03a655b53ca813f0463d2c956bc4db8138dbc89f120b066551e3" +checksum = "4c6d81016d6c977deefb2ef8d8290da019e27cc26167e102185da528e6c0ab38" dependencies = [ "cc", "memchr", "rustc_version", - "toml", + "toml 0.9.5", "vswhom", "winreg", ] @@ -697,26 +683,17 @@ version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ef6b89e5b37196644d8796de5268852ff179b44e96276cf4290264843743bb7" -[[package]] -name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - [[package]] name = "equivalent" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "erased-serde" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d" +checksum = "e004d887f51fcb9fef17317a2f3525c887d8aa3f4f50fed920816a688284a5b7" dependencies = [ "serde", "typeid", @@ -743,9 +720,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.35" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" +checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d" dependencies = [ "crc32fast", "miniz_oxide", @@ -775,7 +752,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -786,9 +763,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b" [[package]] name = "form_urlencoded" -version = "1.2.1" 
+version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] @@ -810,7 +787,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", - "futures-sink", ] [[package]] @@ -844,7 +820,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1007,13 +983,13 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" +checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592" dependencies = [ "cfg-if", "libc", - "wasi 0.11.0+wasi-snapshot-preview1", + "wasi 0.11.1+wasi-snapshot-preview1", ] [[package]] @@ -1028,12 +1004,6 @@ dependencies = [ "wasi 0.14.2+wasi-0.2.4", ] -[[package]] -name = "gimli" -version = "0.31.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" - [[package]] name = "gio" version = "0.18.4" @@ -1072,7 +1042,7 @@ version = "0.18.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "233daaf6e83ae6a12a52055f568f9d7cf4671dabb78ff9560ab6da230ce00ee5" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "futures-channel", "futures-core", "futures-executor", @@ -1096,11 +1066,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bb0228f477c0900c880fd78c8759b95c7636dbd7842707f49e132378aa2acdc" dependencies = [ "heck 0.4.1", - "proc-macro-crate 2.0.0", + 
"proc-macro-crate 2.0.2", "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1115,9 +1085,9 @@ dependencies = [ [[package]] name = "glob" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" [[package]] name = "gobject-sys" @@ -1179,22 +1149,22 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "h2" -version = "0.3.26" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" +checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386" dependencies = [ + "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.7.0", + "http", + "indexmap 2.11.0", "slab", "tokio", "tokio-util", @@ -1209,20 +1179,20 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.15.2" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" [[package]] name = "headers" -version = "0.3.9" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" +checksum = "b3314d5adb5d94bcdf56771f2e50dbbc80bb4bdf88967526706205ac9eff24eb" dependencies = [ - "base64 0.21.7", + "base64 0.22.1", "bytes", "headers-core", - "http 0.2.12", + "http", "httpdate", "mime", "sha1", @@ -1230,11 +1200,11 @@ dependencies = [ [[package]] name = "headers-core" -version = "0.2.0" 
+version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7f66481bfee273957b1f20485a4ff3362987f85b2c236580d81b4eb7a326429" +checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" dependencies = [ - "http 0.2.12", + "http", ] [[package]] @@ -1269,37 +1239,15 @@ dependencies = [ [[package]] name = "http" -version = "0.2.12" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" -dependencies = [ - "bytes", - "fnv", - "itoa", -] - -[[package]] -name = "http" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" +checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565" dependencies = [ "bytes", "fnv", "itoa", ] -[[package]] -name = "http-body" -version = "0.4.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" -dependencies = [ - "bytes", - "http 0.2.12", - "pin-project-lite", -] - [[package]] name = "http-body" version = "1.0.1" @@ -1307,27 +1255,27 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.2.0", + "http", ] [[package]] name = "http-body-util" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" +checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", + "futures-core", + "http", + "http-body", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.9.5" +version = "1.10.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" +checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87" [[package]] name = "httpdate" @@ -1337,42 +1285,22 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "hyper" -version = "0.14.32" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41dfc780fdec9373c01bae43289ea34c972e40ee3c9f6b3c8801a35f35586ce7" +checksum = "eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", "futures-core", - "futures-util", "h2", - "http 0.2.12", - "http-body 0.4.6", + "http", + "http-body", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2", - "tokio", - "tower-service", - "tracing", - "want", -] - -[[package]] -name = "hyper" -version = "1.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "256fb8d4bd6413123cc9d91832d78325c48ff41677595be797d90f42969beae0" -dependencies = [ - "bytes", - "futures-channel", - "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "httparse", - "itoa", - "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", @@ -1380,16 +1308,21 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", - "http 1.2.0", - "http-body 1.0.1", - "hyper 1.5.2", + "http", + "http-body", + "hyper", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", "socket2", "tokio", @@ -1399,16 +1332,17 @@ dependencies = [ [[package]] name = "iana-time-zone" 
-version = "0.1.61" +version = "0.1.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220" +checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", + "log", "wasm-bindgen", - "windows-core 0.52.0", + "windows-core", ] [[package]] @@ -1432,21 +1366,22 @@ dependencies = [ [[package]] name = "icu_collections" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" +checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47" dependencies = [ "displaydoc", + "potential_utf", "yoke", "zerofrom", "zerovec", ] [[package]] -name = "icu_locid" -version = "1.5.0" +name = "icu_locale_core" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" +checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a" dependencies = [ "displaydoc", "litemap", @@ -1455,31 +1390,11 @@ dependencies = [ "zerovec", ] -[[package]] -name = "icu_locid_transform" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" -dependencies = [ - "displaydoc", - "icu_locid", - "icu_locid_transform_data", - "icu_provider", - "tinystr", - "zerovec", -] - -[[package]] -name = "icu_locid_transform_data" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" - [[package]] name = "icu_normalizer" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" +checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979" dependencies = [ "displaydoc", "icu_collections", @@ -1487,67 +1402,54 @@ dependencies = [ "icu_properties", "icu_provider", "smallvec", - "utf16_iter", - "utf8_iter", - "write16", "zerovec", ] [[package]] name = "icu_normalizer_data" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" +checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3" [[package]] name = "icu_properties" -version = "1.5.1" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" +checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b" dependencies = [ "displaydoc", "icu_collections", - "icu_locid_transform", + "icu_locale_core", "icu_properties_data", "icu_provider", - "tinystr", + "potential_utf", + "zerotrie", "zerovec", ] [[package]] name = "icu_properties_data" -version = "1.5.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" +checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632" [[package]] name = "icu_provider" -version = "1.5.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" +checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af" dependencies = [ "displaydoc", - "icu_locid", - "icu_provider_macros", + "icu_locale_core", "stable_deref_trait", "tinystr", "writeable", "yoke", "zerofrom", + "zerotrie", "zerovec", ] -[[package]] -name = "icu_provider_macros" -version = "1.5.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.90", -] - [[package]] name = "ident_case" version = "1.0.1" @@ -1556,9 +1458,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1567,9 +1469,9 @@ dependencies = [ [[package]] name = "idna_adapter" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" +checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344" dependencies = [ "icu_normalizer", "icu_properties", @@ -1588,12 +1490,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.7.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" +checksum = "f2481980430f9f78649238835720ddccc57e52df14ffce1c6f37391d61b563e9" dependencies = [ "equivalent", - "hashbrown 0.15.2", + "hashbrown 0.15.5", "serde", ] @@ -1607,27 +1509,26 @@ dependencies = [ ] [[package]] -name = "io-uring" -version = "0.7.8" +name = "ipnet" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b86e202f00093dcba4275d4636b93ef9dd75d025ae560d2521b45ea28ab49013" -dependencies = [ - "bitflags 2.6.0", - "cfg-if", - "libc", -] +checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" [[package]] -name = "ipnet" -version = "2.10.1" +name = "iri-string" +version = "0.7.8" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] [[package]] name = "itoa" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "javascriptcore-rs" @@ -1676,9 +1577,9 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "js-sys" -version = "0.3.76" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" dependencies = [ "once_cell", "wasm-bindgen", @@ -1712,7 +1613,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b750dcadc39a09dbadd74e118f6dd6598df77fa01df0cfcdc52c28dece74528a" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "serde", "unicode-segmentation", ] @@ -1725,7 +1626,7 @@ checksum = "02cb977175687f33fa4afa0c95c112b987ea1443e5a51c8f8ff27dc618270cc2" dependencies = [ "cssparser", "html5ever", - "indexmap 2.7.0", + "indexmap 2.11.0", "selectors", ] @@ -1761,9 +1662,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.168" +version = "0.2.175" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" +checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" [[package]] name = "libloading" @@ -1777,25 +1678,25 @@ dependencies = [ [[package]] name = "libredox" -version = "0.1.3" +version = "0.1.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" +checksum = "391290121bad3d37fbddad76d8f5d1c1c314cfc646d143d7e07a3086ddff0ce3" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "libc", ] [[package]] name = "litemap" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956" [[package]] name = "lock_api" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" +checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765" dependencies = [ "autocfg", "scopeguard", @@ -1803,9 +1704,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.26" +version = "0.4.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" [[package]] name = "mac" @@ -1820,7 +1721,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7a7213d12e1864c0f002f52c2923d4556935a43dec5e71355c2760e0f6e7a18" dependencies = [ "log", - "phf 0.11.2", + "phf 0.11.3", "phf_codegen 0.11.3", "string_cache", "string_cache_codegen", @@ -1835,7 +1736,7 @@ checksum = "88a9689d8d44bf9964484516275f5cd4c9b59457a6940c1d5d0ecbb94510a36b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -1846,9 +1747,9 @@ checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" [[package]] name = "memchr" -version = "2.7.4" +version = "2.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memoffset" @@ -1877,9 +1778,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.0" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" dependencies = [ "adler2", "simd-adler32", @@ -1887,52 +1788,34 @@ dependencies = [ [[package]] name = "mio" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c" dependencies = [ "libc", - "wasi 0.11.0+wasi-snapshot-preview1", - "windows-sys 0.52.0", + "wasi 0.11.1+wasi-snapshot-preview1", + "windows-sys 0.59.0", ] [[package]] name = "muda" -version = "0.17.0" +version = "0.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58b89bf91c19bf036347f1ab85a81c560f08c0667c8601bece664d860a600988" +checksum = "01c1738382f66ed56b3b9c8119e794a2e23148ac8ea214eda86622d4cb9d415a" dependencies = [ "crossbeam-channel", "dpi", "gtk", "keyboard-types", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "once_cell", "png", "serde", - "thiserror 2.0.7", - "windows-sys 0.59.0", -] - -[[package]] -name = "multer" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" -dependencies = [ - "bytes", - "encoding_rs", - "futures-util", - "http 0.2.12", - "httparse", - "log", - "memchr", - "mime", - "spin", - "version_check", + "thiserror 2.0.16", + "windows-sys 0.60.2", 
] [[package]] @@ -1941,7 +1824,7 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "jni-sys", "log", "ndk-sys", @@ -1994,23 +1877,24 @@ dependencies = [ [[package]] name = "num_enum" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e613fc340b2220f734a8595782c551f1250e969d87d3be1ae0579e8d4065179" +checksum = "a973b4e44ce6cad84ce69d797acf9a044532e4184c4f267913d1b546a0727b7a" dependencies = [ "num_enum_derive", + "rustversion", ] [[package]] name = "num_enum_derive" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af1844ef2428cc3e1cb900be36181049ef3d3193c63e43026cfe202983b27a56" +checksum = "77e878c846a8abae00dd069496dbe8751b16ac1c3d6bd2a7283a938e8228f90d" dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate 2.0.2", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2031,9 +1915,9 @@ dependencies = [ [[package]] name = "objc2" -version = "0.6.0" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3531f65190d9cff863b77a99857e74c314dd16bf56c538c4b57c7cbc3f3a6e59" +checksum = "561f357ba7f3a2a61563a186a163d0a3a5247e1089524a3981d49adb775078bc" dependencies = [ "objc2-encode", "objc2-exception-helper", @@ -2041,75 +1925,77 @@ dependencies = [ [[package]] name = "objc2-app-kit" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5906f93257178e2f7ae069efb89fbd6ee94f0592740b5f8a1512ca498814d0fb" +checksum = "e6f29f568bec459b0ddff777cec4fe3fd8666d82d5a40ebd0ff7e66134f89bcc" dependencies = [ - "bitflags 2.6.0", - "block2 0.6.0", + "bitflags 2.9.3", + "block2 0.6.1", "libc", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-cloud-kit", 
"objc2-core-data", "objc2-core-foundation", "objc2-core-graphics", "objc2-core-image", - "objc2-foundation 0.3.0", - "objc2-quartz-core 0.3.0", + "objc2-foundation 0.3.1", + "objc2-quartz-core 0.3.1", ] [[package]] name = "objc2-cloud-kit" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c1948a9be5f469deadbd6bcb86ad7ff9e47b4f632380139722f7d9840c0d42c" +checksum = "17614fdcd9b411e6ff1117dfb1d0150f908ba83a7df81b1f118005fe0a8ea15d" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", - "objc2-foundation 0.3.0", + "bitflags 2.9.3", + "objc2 0.6.2", + "objc2-foundation 0.3.1", ] [[package]] name = "objc2-core-data" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f860f8e841f6d32f754836f51e6bc7777cd7e7053cf18528233f6811d3eceb4" +checksum = "291fbbf7d29287518e8686417cf7239c74700fd4b607623140a7d4a3c834329d" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", - "objc2-foundation 0.3.0", + "bitflags 2.9.3", + "objc2 0.6.2", + "objc2-foundation 0.3.1", ] [[package]] name = "objc2-core-foundation" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "daeaf60f25471d26948a1c2f840e3f7d86f4109e3af4e8e4b5cd70c39690d925" +checksum = "1c10c2894a6fed806ade6027bcd50662746363a9589d3ec9d9bef30a4e4bc166" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", + "bitflags 2.9.3", + "dispatch2", + "objc2 0.6.2", ] [[package]] name = "objc2-core-graphics" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dca602628b65356b6513290a21a6405b4d4027b8b250f0b98dddbb28b7de02" +checksum = "989c6c68c13021b5c2d6b71456ebb0f9dc78d752e86a98da7c716f4f9470f5a4" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", + "bitflags 2.9.3", + "dispatch2", + "objc2 0.6.2", "objc2-core-foundation", "objc2-io-surface", ] [[package]] name = 
"objc2-core-image" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ffa6bea72bf42c78b0b34e89c0bafac877d5f80bf91e159a5d96ea7f693ca56" +checksum = "79b3dc0cc4386b6ccf21c157591b34a7f44c8e75b064f85502901ab2188c007e" dependencies = [ - "objc2 0.6.0", - "objc2-foundation 0.3.0", + "objc2 0.6.2", + "objc2-foundation 0.3.1", ] [[package]] @@ -2133,7 +2019,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ee638a5da3799329310ad4cfa62fbf045d5f56e3ef5ba4149e7452dcf89d5a8" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "block2 0.5.1", "libc", "objc2 0.5.2", @@ -2141,25 +2027,35 @@ dependencies = [ [[package]] name = "objc2-foundation" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a21c6c9014b82c39515db5b396f91645182611c97d24637cf56ac01e5f8d998" +checksum = "900831247d2fe1a09a683278e5384cfb8c80c79fe6b166f9d14bfdde0ea1b03c" dependencies = [ - "bitflags 2.6.0", - "block2 0.6.0", + "bitflags 2.9.3", + "block2 0.6.1", "libc", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-core-foundation", ] [[package]] name = "objc2-io-surface" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "161a8b87e32610086e1a7a9e9ec39f84459db7b3a0881c1f16ca5a2605581c19" +checksum = "7282e9ac92529fa3457ce90ebb15f4ecbc383e8338060960760fa2cf75420c3c" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", + "bitflags 2.9.3", + "objc2 0.6.2", + "objc2-core-foundation", +] + +[[package]] +name = "objc2-javascript-core" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9052cb1bb50a4c161d934befcf879526fb87ae9a68858f241e693ca46225cf5a" +dependencies = [ + "objc2 0.6.2", "objc2-core-foundation", ] @@ -2169,7 +2065,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dd0cba1276f6023976a406a14ffa85e1fdd19df6b0f737b063b95f6c8c7aadd6" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -2181,7 +2077,7 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e42bee7bff906b14b167da2bac5efe6b6a07e6f7c0a21a7308d40c960242dc7a" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", "block2 0.5.1", "objc2 0.5.2", "objc2-foundation 0.2.2", @@ -2190,55 +2086,59 @@ dependencies = [ [[package]] name = "objc2-quartz-core" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb3794501bb1bee12f08dcad8c61f2a5875791ad1c6f47faa71a0f033f20071" +checksum = "90ffb6a0cd5f182dc964334388560b12a57f7b74b3e2dec5e2722aa2dfb2ccd5" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", - "objc2-foundation 0.3.0", + "bitflags 2.9.3", + "objc2 0.6.2", + "objc2-foundation 0.3.1", ] [[package]] -name = "objc2-ui-kit" -version = "0.3.0" +name = "objc2-security" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "777a571be14a42a3990d4ebedaeb8b54cd17377ec21b92e8200ac03797b3bee1" +checksum = "e1f8e0ef3ab66b08c42644dcb34dba6ec0a574bbd8adbb8bdbdc7a2779731a44" dependencies = [ - "bitflags 2.6.0", - "objc2 0.6.0", + "bitflags 2.9.3", + "objc2 0.6.2", "objc2-core-foundation", - "objc2-foundation 0.3.0", ] [[package]] -name = "objc2-web-kit" -version = "0.3.0" +name = "objc2-ui-kit" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b717127e4014b0f9f3e8bba3d3f2acec81f1bde01f656823036e823ed2c94dce" +checksum = "25b1312ad7bc8a0e92adae17aa10f90aae1fb618832f9b993b022b591027daed" dependencies = [ - "bitflags 2.6.0", - "block2 0.6.0", - "objc2 0.6.0", - "objc2-app-kit", + "bitflags 2.9.3", + "objc2 0.6.2", "objc2-core-foundation", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", ] [[package]] -name = "object" 
-version = "0.36.5" +name = "objc2-web-kit" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" +checksum = "91672909de8b1ce1c2252e95bbee8c1649c9ad9d14b9248b3d7b4c47903c47ad" dependencies = [ - "memchr", + "bitflags 2.9.3", + "block2 0.6.1", + "objc2 0.6.2", + "objc2-app-kit", + "objc2-core-foundation", + "objc2-foundation 0.3.1", + "objc2-javascript-core", + "objc2-security", ] [[package]] name = "once_cell" -version = "1.20.2" +version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" [[package]] name = "option-ext" @@ -2273,9 +2173,9 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" +checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13" dependencies = [ "lock_api", "parking_lot_core", @@ -2283,9 +2183,9 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" +checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5" dependencies = [ "cfg-if", "libc", @@ -2296,9 +2196,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "phf" @@ -2322,12 +2222,12 @@ dependencies = [ [[package]] name 
= "phf" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ade2d8b8f33c7333b51bcf0428d37e217e9f32192ae4772156f65063b8ce03dc" +checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" dependencies = [ - "phf_macros 0.11.2", - "phf_shared 0.11.2", + "phf_macros 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -2346,8 +2246,8 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a" dependencies = [ - "phf_generator 0.11.2", - "phf_shared 0.11.2", + "phf_generator 0.11.3", + "phf_shared 0.11.3", ] [[package]] @@ -2372,11 +2272,11 @@ dependencies = [ [[package]] name = "phf_generator" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "48e4cc64c2ad9ebe670cb8fd69dd50ae301650392e81c05f9bfcb2d5bdbc24b0" +checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.2", + "phf_shared 0.11.3", "rand 0.8.5", ] @@ -2396,15 +2296,15 @@ dependencies = [ [[package]] name = "phf_macros" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3444646e286606587e49f3bcf1679b8cef1dc2c5ecc29ddacaffc305180d464b" +checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ - "phf_generator 0.11.2", - "phf_shared 0.11.2", + "phf_generator 0.11.3", + "phf_shared 0.11.3", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2413,7 +2313,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c00cf8b9eafe68dde5e9eaa2cef8ee84a9336a47d566ec55ca16589633b65af7" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] @@ -2422,43 +2322,43 @@ version = "0.10.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "b6796ad771acdc0123d2a88dc428b5e38ef24456743ddb1744ed628f9815c096" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] name = "phf_shared" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" +checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5" dependencies = [ - "siphasher", + "siphasher 1.0.1", ] [[package]] name = "pin-project" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" +checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" +checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "pin-project-lite" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" +checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" [[package]] name = "pin-utils" @@ -2468,18 +2368,18 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "pkg-config" -version = "0.3.31" +version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" +checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] 
name = "plist" -version = "1.7.0" +version = "1.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42cf17e9a1800f5f396bc67d193dc9411b59012a5876445ef450d449881e1016" +checksum = "3af6b589e163c5a788fab00ce0c0366f6efbb9959c2f9874b224936af7fce7e1" dependencies = [ "base64 0.22.1", - "indexmap 2.7.0", + "indexmap 2.11.0", "quick-xml", "serde", "time", @@ -2487,9 +2387,9 @@ dependencies = [ [[package]] name = "png" -version = "0.17.15" +version = "0.17.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b67582bd5b65bdff614270e2ea89a1cf15bef71245cc1e5f7ea126977144211d" +checksum = "82151a2fc869e011c153adc57cf2789ccb8d9906ce52c0b39a6b5697749d7526" dependencies = [ "bitflags 1.3.2", "crc32fast", @@ -2499,16 +2399,25 @@ dependencies = [ ] [[package]] -name = "powerfmt" -version = "0.2.0" +name = "potential_utf" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +dependencies = [ + "zerovec", +] + +[[package]] +name = "powerfmt" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.20" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9" dependencies = [ "zerocopy", ] @@ -2531,10 +2440,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "2.0.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "b00f26d3400549137f92511a46ac1cd8ce37cb5598a96d382381458b992a5d24" dependencies = [ + "toml_datetime 0.6.3", "toml_edit 0.20.2", ] 
@@ -2570,27 +2480,27 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.92" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] [[package]] name = "quick-xml" -version = "0.32.0" +version = "0.38.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3a6e5838b60e0e8fa7a43f22ade549a37d61f8bdbe636d0d7816191de969c2" +checksum = "42a232e7487fc2ef313d96dde7948e7a3c05101870d8985e4fd8d26aedd27b89" dependencies = [ "memchr", ] [[package]] name = "quote" -version = "1.0.37" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] @@ -2661,7 +2571,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", ] [[package]] @@ -2690,29 +2600,49 @@ checksum = "20675572f6f24e9e76ef639bc5552774ed45f1c30e2951e1e99c59888861c539" [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", ] [[package]] name = "redox_users" -version = "0.5.0" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"dd6f9d3d47bdd2ad6945c5015a226ec6155d0bcdfd8f7cd29f86b71f8de99d2b" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.2.16", "libredox", - "thiserror 2.0.7", + "thiserror 2.0.16", +] + +[[package]] +name = "ref-cast" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf" +dependencies = [ + "ref-cast-impl", +] + +[[package]] +name = "ref-cast-impl" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.106", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" dependencies = [ "aho-corasick", "memchr", @@ -2722,9 +2652,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6" dependencies = [ "aho-corasick", "memchr", @@ -2733,30 +2663,27 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" [[package]] name = "reqwest" -version = "0.12.9" +version = "0.12.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", "futures-core", "futures-util", - "http 1.2.0", - "http-body 1.0.1", + "http", + "http-body", "http-body-util", - "hyper 1.5.2", + "hyper", "hyper-util", - "ipnet", "js-sys", "log", - "mime", - "once_cell", "percent-encoding", "pin-project-lite", "serde", @@ -2765,36 +2692,30 @@ dependencies = [ "sync_wrapper", "tokio", "tokio-util", + "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "wasm-streams", "web-sys", - "windows-registry", ] [[package]] name = "ring" -version = "0.17.8" +version = "0.17.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" +checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7" dependencies = [ "cc", "cfg-if", - "getrandom 0.2.15", + "getrandom 0.2.16", "libc", - "spin", "untrusted", "windows-sys 0.52.0", ] -[[package]] -name = "rustc-demangle" -version = "0.1.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" - [[package]] name = "rustc_version" version = "0.4.1" @@ -2806,9 +2727,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.23" +version = "0.23.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "47796c98c480fce5406ef69d1c76378375492c3b0a0de587be0c1d9feb12f395" +checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc" dependencies = [ "log", "once_cell", @@ -2830,26 +2751,35 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "917ce264624a4b4db1c364dcc35bfca9ded014d0a958cd47ad3e960e988ea51c" +checksum = 
"229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79" +dependencies = [ + "zeroize", +] [[package]] name = "rustls-webpki" -version = "0.102.8" +version = "0.103.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc" dependencies = [ "ring", "rustls-pki-types", "untrusted", ] +[[package]] +name = "rustversion" +version = "1.0.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d" + [[package]] name = "ryu" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" [[package]] name = "same-file" @@ -2862,9 +2792,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.21" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" dependencies = [ "dyn-clone", "indexmap 1.9.3", @@ -2875,16 +2805,40 @@ dependencies = [ "uuid", ] +[[package]] +name = "schemars" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + +[[package]] +name = "schemars" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" +dependencies = [ + "dyn-clone", + "ref-cast", + "serde", + "serde_json", +] + [[package]] name = "schemars_derive" 
-version = "0.8.21" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2919,42 +2873,52 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.24" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" +checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] [[package]] name = "serde-untagged" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2676ba99bd82f75cae5cbd2c8eda6fa0b8760f18978ea840e980dd5567b5c5b6" +checksum = "34836a629bcbc6f1afdf0907a744870039b1e14c0561cb26094fa683b158eff3" dependencies = [ "erased-serde", "serde", "typeid", ] +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -2965,37 +2929,47 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "serde_json" -version = "1.0.140" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] name = "serde_repr" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c64451ba24fc7a6a2d60fc75dd9c83c90903b19028d4eff35e88fc1e86564e9" +checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "serde_spanned" -version = "0.6.8" +version = "0.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_spanned" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" dependencies = [ "serde", ] @@ -3014,15 +2988,17 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.11.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e28bdad6db2b8340e449f7108f020b3b092e8583a9e3fb82713e1d4e71fe817" +checksum = "f2c45cd61fefa9db6f254525d46e392b852e0e61d9a1fd36e5bd183450a556d5" dependencies = [ "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.7.0", + "indexmap 2.11.0", + 
"schemars 0.9.0", + "schemars 1.0.4", "serde", "serde_derive", "serde_json", @@ -3032,21 +3008,21 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.11.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d846214a9854ef724f3da161b426242d8de7c1fc7de2f89bb1efcb154dca79d" +checksum = "de90945e6565ce0d9a25098082ed4ee4002e047cb59892c318d66821e14bb30f" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "serialize-to-javascript" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9823f2d3b6a81d98228151fdeaf848206a7855a7a042bbf9bf870449a66cafb" +checksum = "04f3666a07a197cdb77cdf306c32be9b7f598d7060d50cfd4d5aa04bfd92f6c5" dependencies = [ "serde", "serde_json", @@ -3055,13 +3031,13 @@ dependencies = [ [[package]] name = "serialize-to-javascript-impl" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74064874e9f6a15f04c1f3cb627902d0e6b410abbf36668afa873c61889f1763" +checksum = "772ee033c0916d670af7860b6e1ef7d658a4629a6d0b4c8c3e67f09b3765b75d" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.106", ] [[package]] @@ -3087,9 +3063,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", "cpufeatures", @@ -3104,9 +3080,9 @@ checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" [[package]] name = "signal-hook-registry" -version = "1.4.2" +version = "1.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" +checksum = 
"b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b" dependencies = [ "libc", ] @@ -3123,29 +3099,32 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + [[package]] name = "slab" -version = "0.4.9" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" -dependencies = [ - "autocfg", -] +checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" [[package]] name = "smallvec" -version = "1.13.2" +version = "1.15.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" +checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "socket2" -version = "0.5.8" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -3196,12 +3175,6 @@ dependencies = [ "system-deps", ] -[[package]] -name = "spin" -version = "0.9.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" - [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -3210,26 +3183,25 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "string_cache" -version = "0.8.7" +version = "0.8.9" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f91138e76242f575eb1d3b38b4f1362f10d3a43f47d182a5b359af488a02293b" +checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", - "once_cell", "parking_lot", - "phf_shared 0.10.0", + "phf_shared 0.11.3", "precomputed-hash", "serde", ] [[package]] name = "string_cache_codegen" -version = "0.5.2" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb30289b722be4ff74a408c3cc27edeaad656e06cb1fe8fa9231fa59c728988" +checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0" dependencies = [ - "phf_generator 0.10.0", - "phf_shared 0.10.0", + "phf_generator 0.11.3", + "phf_shared 0.11.3", "proc-macro2", "quote", ] @@ -3270,9 +3242,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.90" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "919d3b74a5dd0ccd15aeb8f93e7006bd9e14c295087c9896a110f490752bcf31" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -3290,13 +3262,13 @@ dependencies = [ [[package]] name = "synstructure" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" +checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -3308,17 +3280,18 @@ dependencies = [ "cfg-expr", "heck 0.5.0", "pkg-config", - "toml", + "toml 0.8.2", "version-compare", ] [[package]] name = "tao" -version = "0.34.0" +version = "0.34.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49c380ca75a231b87b6c9dd86948f035012e7171d1a7c40a9c2890489a7ffd8a" +checksum = 
"f3a753bdc39c07b192151523a3f77cd0394aa75413802c883a0f6f6a0e5ee2e7" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", + "block2 0.6.1", "core-foundation", "core-graphics", "crossbeam-channel", @@ -3335,9 +3308,9 @@ dependencies = [ "ndk", "ndk-context", "ndk-sys", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "once_cell", "parking_lot", "raw-window-handle", @@ -3346,7 +3319,7 @@ dependencies = [ "unicode-segmentation", "url", "windows", - "windows-core 0.61.0", + "windows-core", "windows-version", "x11-dl", ] @@ -3359,7 +3332,7 @@ checksum = "f4e16beb8b2ac17db28eab8bca40e62dbfbb34c0fcdc6d9826b11b7b5d047dfd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -3370,12 +3343,13 @@ checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" [[package]] name = "tauri" -version = "2.6.2" +version = "2.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "124e129c9c0faa6bec792c5948c89e86c90094133b0b9044df0ce5f0a8efaa0d" +checksum = "8bceb52453e507c505b330afe3398510e87f428ea42b6e76ecb6bd63b15965b5" dependencies = [ "anyhow", "bytes", + "cookie", "dirs", "dunce", "embed_plist", @@ -3383,16 +3357,17 @@ dependencies = [ "glob", "gtk", "heck 0.5.0", - "http 1.2.0", + "http", "jni", "libc", "log", "mime", "muda", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "objc2-ui-kit", + "objc2-web-kit", "percent-encoding", "plist", "raw-window-handle", @@ -3407,11 +3382,10 @@ dependencies = [ "tauri-runtime", "tauri-runtime-wry", "tauri-utils", - "thiserror 2.0.7", + "thiserror 2.0.16", "tokio", "tray-icon", "url", - "urlpattern", "webkit2gtk", "webview2-com", "window-vibrancy", @@ -3420,9 +3394,9 @@ dependencies = [ [[package]] name = "tauri-build" -version = "2.3.0" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"12f025c389d3adb83114bec704da973142e82fc6ec799c7c750c5e21cefaec83" +checksum = "a924b6c50fe83193f0f8b14072afa7c25b7a72752a2a73d9549b463f5fe91a38" dependencies = [ "anyhow", "cargo_toml", @@ -3430,21 +3404,21 @@ dependencies = [ "glob", "heck 0.5.0", "json-patch", - "schemars", + "schemars 0.8.22", "semver", "serde", "serde_json", "tauri-utils", "tauri-winres", - "toml", + "toml 0.9.5", "walkdir", ] [[package]] name = "tauri-codegen" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f5df493a1075a241065bc865ed5ef8d0fbc1e76c7afdc0bf0eccfaa7d4f0e406" +checksum = "6c1fe64c74cc40f90848281a90058a6db931eb400b60205840e09801ee30f190" dependencies = [ "base64 0.22.1", "brotli", @@ -3458,9 +3432,9 @@ dependencies = [ "serde", "serde_json", "sha2", - "syn 2.0.90", + "syn 2.0.106", "tauri-utils", - "thiserror 2.0.7", + "thiserror 2.0.16", "time", "url", "uuid", @@ -3469,53 +3443,56 @@ dependencies = [ [[package]] name = "tauri-macros" -version = "2.3.1" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f237fbea5866fa5f2a60a21bea807a2d6e0379db070d89c3a10ac0f2d4649bbc" +checksum = "260c5d2eb036b76206b9fca20b7be3614cfd21046c5396f7959e0e64a4b07f2f" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "tauri-codegen", "tauri-utils", ] [[package]] name = "tauri-runtime" -version = "2.7.0" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e7bb73d1bceac06c20b3f755b2c8a2cb13b20b50083084a8cf3700daf397ba4" +checksum = "9368f09358496f2229313fccb37682ad116b7f46fa76981efe116994a0628926" dependencies = [ "cookie", "dpi", "gtk", - "http 1.2.0", + "http", "jni", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-ui-kit", + "objc2-web-kit", "raw-window-handle", "serde", "serde_json", "tauri-utils", - "thiserror 2.0.7", + "thiserror 2.0.16", "url", + "webkit2gtk", + "webview2-com", "windows", ] [[package]] name = 
"tauri-runtime-wry" -version = "2.7.1" +version = "2.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "902b5aa9035e16f342eb64f8bf06ccdc2808e411a2525ed1d07672fa4e780bad" +checksum = "929f5df216f5c02a9e894554401bcdab6eec3e39ec6a4a7731c7067fc8688a93" dependencies = [ "gtk", - "http 1.2.0", + "http", "jni", "log", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "once_cell", "percent-encoding", "raw-window-handle", @@ -3532,9 +3509,9 @@ dependencies = [ [[package]] name = "tauri-utils" -version = "2.5.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41743bbbeb96c3a100d234e5a0b60a46d5aa068f266160862c7afdbf828ca02e" +checksum = "f6b8bbe426abdbf52d050e52ed693130dbd68375b9ad82a3fb17efb4c8d85673" dependencies = [ "anyhow", "brotli", @@ -3543,25 +3520,25 @@ dependencies = [ "dunce", "glob", "html5ever", - "http 1.2.0", + "http", "infer", "json-patch", "kuchikiki", "log", "memchr", - "phf 0.11.2", + "phf 0.11.3", "proc-macro2", "quote", "regex", - "schemars", + "schemars 0.8.22", "semver", "serde", "serde-untagged", "serde_json", "serde_with", "swift-rs", - "thiserror 2.0.7", - "toml", + "thiserror 2.0.16", + "toml 0.9.5", "url", "urlpattern", "uuid", @@ -3570,12 +3547,12 @@ dependencies = [ [[package]] name = "tauri-winres" -version = "0.3.0" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56eaa45f707bedf34d19312c26d350bc0f3c59a47e58e8adbeecdc850d2c13a0" +checksum = "fd21509dd1fa9bd355dc29894a6ff10635880732396aa38c0066c1e6c1ab8074" dependencies = [ "embed-resource", - "toml", + "toml 0.9.5", ] [[package]] @@ -3600,11 +3577,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.7" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93605438cbd668185516ab499d589afb7ee1859ea3d5fc8f6b0755e1c7443767" +checksum = 
"3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ - "thiserror-impl 2.0.7", + "thiserror-impl 2.0.16", ] [[package]] @@ -3615,25 +3592,25 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "thiserror-impl" -version = "2.0.7" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d8749b4531af2117677a5fcd12b1348a3fe2b81e36e61ffeac5c4aa3273e36" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "time" -version = "0.3.37" +version = "0.3.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e7868883861bd0e56d9ac6efcaaca0d6d5d82a2a7ec8209ff492c07cf37b21" +checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" dependencies = [ "deranged", "itoa", @@ -3646,15 +3623,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" [[package]] name = "time-macros" -version = "0.2.19" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2834e6017e3e5e4b9834939793b282bc03b37a3336245fa820e35e233e2a85de" +checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" dependencies = [ "num-conv", "time-core", @@ -3662,9 +3639,9 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.6" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" +checksum = 
"5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" dependencies = [ "displaydoc", "zerovec", @@ -3672,52 +3649,37 @@ dependencies = [ [[package]] name = "tokio" -version = "1.46.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0cc3a2344dafbe23a245241fe8b09735b521110d30fcefbbd5feb1797ca35d17" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", - "io-uring", "libc", "mio", "parking_lot", "pin-project-lite", "signal-hook-registry", - "slab", "socket2", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", -] - -[[package]] -name = "tokio-tungstenite" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" -dependencies = [ - "futures-util", - "log", - "tokio", - "tungstenite", + "syn 2.0.106", ] [[package]] name = "tokio-util" -version = "0.7.13" +version = "0.7.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" +checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5" dependencies = [ "bytes", "futures-core", @@ -3728,21 +3690,45 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.19" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "185d8ab0dfbb35cf1399a6344d8484209c088f75f8f68230da55d48d95d43e3d" +dependencies = [ + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.3", + "toml_edit 0.20.2", 
+] + +[[package]] +name = "toml" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" +checksum = "75129e1dc5000bfbaa9fee9d1b21f974f9fbad9daec557a521ee6e080825f6e8" dependencies = [ + "indexmap 2.11.0", "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.22.23", + "serde_spanned 1.0.0", + "toml_datetime 0.7.0", + "toml_parser", + "toml_writer", + "winnow 0.7.13", ] [[package]] name = "toml_datetime" -version = "0.6.8" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_datetime" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" dependencies = [ "serde", ] @@ -3753,8 +3739,8 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.7.0", - "toml_datetime", + "indexmap 2.11.0", + "toml_datetime 0.6.3", "winnow 0.5.40", ] @@ -3764,24 +3750,67 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "396e4d48bbb2b7554c944bde63101b5ae446cff6ec4a24227428f15eb72ef338" dependencies = [ - "indexmap 2.7.0", - "toml_datetime", + "indexmap 2.11.0", + "serde", + "serde_spanned 0.6.9", + "toml_datetime 0.6.3", "winnow 0.5.40", ] [[package]] -name = "toml_edit" -version = "0.22.23" +name = "toml_parser" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a8b472d1a3d7c18e2d61a489aee3453fd9031c33e4f55bd533f4a7adca1bee" +checksum = 
"b551886f449aa90d4fe2bdaa9f4a2577ad2dde302c61ecf262d80b116db95c10" dependencies = [ - "indexmap 2.7.0", - "serde", - "serde_spanned", - "toml_datetime", - "winnow 0.7.0", + "winnow 0.7.13", ] +[[package]] +name = "toml_writer" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" + +[[package]] +name = "tower" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "pin-project-lite", + "sync_wrapper", + "tokio", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags 2.9.3", + "bytes", + "futures-util", + "http", + "http-body", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + +[[package]] +name = "tower-layer" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" + [[package]] name = "tower-service" version = "0.3.3" @@ -3801,32 +3830,32 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" +checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" dependencies = [ "once_cell", ] [[package]] name = "tray-icon" -version = "0.21.0" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da75ec677957aa21f6e0b361df0daab972f13a5bee3606de0638fd4ee1c666a" +checksum = 
"a0d92153331e7d02ec09137538996a7786fe679c629c279e82a6be762b7e6fe2" dependencies = [ "crossbeam-channel", "dirs", "libappindicator", "muda", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", "objc2-core-foundation", "objc2-core-graphics", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "once_cell", "png", "serde", - "thiserror 2.0.7", + "thiserror 2.0.16", "windows-sys 0.59.0", ] @@ -3836,36 +3865,17 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" -[[package]] -name = "tungstenite" -version = "0.21.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" -dependencies = [ - "byteorder", - "bytes", - "data-encoding", - "http 1.2.0", - "httparse", - "log", - "rand 0.8.5", - "sha1", - "thiserror 1.0.69", - "url", - "utf-8", -] - [[package]] name = "typeid" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e13db2e0ccd5e14a544e8a246ba2312cd25223f616442d7f2cb0e3db614236e" +checksum = "bc7d623258602320d5c55d1bc22793b57daff0ec7efc270ea7d55ce1d5f5471c" [[package]] name = "typenum" -version = "1.17.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" [[package]] name = "unic-char-property" @@ -3910,15 +3920,15 @@ dependencies = [ [[package]] name = "unicase" -version = "2.8.0" +version = "2.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" [[package]] name = "unicode-ident" -version = "1.0.14" +version = "1.0.18" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" [[package]] name = "unicode-segmentation" @@ -3934,9 +3944,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "ureq" -version = "3.0.12" +version = "3.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f0fde9bc91026e381155f8c67cb354bcd35260b2f4a29bcc84639f762760c39" +checksum = "99ba1025f18a4a3fc3e9b48c868e9beb4f24f4b4b1a325bada26bd4119f46537" dependencies = [ "base64 0.22.1", "flate2", @@ -3952,21 +3962,21 @@ dependencies = [ [[package]] name = "ureq-proto" -version = "0.4.2" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59db78ad1923f2b1be62b6da81fe80b173605ca0d57f85da2e005382adf693f7" +checksum = "60b4531c118335662134346048ddb0e54cc86bd7e81866757873055f0e38f5d2" dependencies = [ "base64 0.22.1", - "http 1.2.0", + "http", "httparse", "log", ] [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -3992,12 +4002,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" -[[package]] -name = "utf16_iter" -version = "1.0.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" - [[package]] name = "utf8_iter" version = "1.0.4" @@ -4006,12 +4010,14 @@ checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" [[package]] name = "uuid" 
-version = "1.11.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" +checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" dependencies = [ - "getrandom 0.2.15", + "getrandom 0.3.3", + "js-sys", "serde", + "wasm-bindgen", ] [[package]] @@ -4038,9 +4044,9 @@ dependencies = [ [[package]] name = "vswhom-sys" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3b17ae1f6c8a2b28506cd96d412eebf83b4a0ff2cbefeeb952f2f9dfa44ba18" +checksum = "fb067e4cbd1ff067d1df46c9194b5de0e98efd2810bbc95c5d5e5f25a3231150" dependencies = [ "cc", "libc", @@ -4067,20 +4073,21 @@ dependencies = [ [[package]] name = "warp" -version = "0.3.7" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4378d202ff965b011c64817db11d5829506d3404edeadb61f190d111da3f231c" +checksum = "51d06d9202adc1f15d709c4f4a2069be5428aa912cc025d6f268ac441ab066b0" dependencies = [ "bytes", - "futures-channel", "futures-util", "headers", - "http 0.2.12", - "hyper 0.14.32", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-util", "log", "mime", "mime_guess", - "multer", "percent-encoding", "pin-project", "scoped-tls", @@ -4088,7 +4095,6 @@ dependencies = [ "serde_json", "serde_urlencoded", "tokio", - "tokio-tungstenite", "tokio-util", "tower-service", "tracing", @@ -4102,9 +4108,9 @@ checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" [[package]] name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" +version = "0.11.1+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" @@ -4117,34 +4123,35 @@ dependencies = [ [[package]] 
name = "wasm-bindgen" -version = "0.2.99" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" dependencies = [ "cfg-if", "once_cell", + "rustversion", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.99" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.49" +version = "0.4.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" +checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" dependencies = [ "cfg-if", "js-sys", @@ -4155,9 +4162,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.99" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4165,22 +4172,25 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.99" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "wasm-bindgen-backend", 
"wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.99" +version = "0.2.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] [[package]] name = "wasm-streams" @@ -4197,9 +4207,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.76" +version = "0.3.77" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" dependencies = [ "js-sys", "wasm-bindgen", @@ -4251,9 +4261,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.26.8" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2210b291f7ea53617fbafcc4939f10914214ec15aace5ba62293a668f322c5c9" +checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" dependencies = [ "rustls-pki-types", ] @@ -4267,7 +4277,7 @@ dependencies = [ "webview2-com-macros", "webview2-com-sys", "windows", - "windows-core 0.61.0", + "windows-core", "windows-implement", "windows-interface", ] @@ -4280,7 +4290,7 @@ checksum = "1d228f15bba3b9d56dde8bddbee66fa24545bd17b48d5128ccf4a8742b18e431" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -4289,9 +4299,9 @@ version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36695906a1b53a3bf5c4289621efedac12b73eeb0b89e7e1a89b517302d5d75c" dependencies = [ - "thiserror 2.0.7", + "thiserror 2.0.16", "windows", - "windows-core 0.61.0", + "windows-core", ] [[package]] @@ -4312,11 +4322,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = 
"0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "0978bf7171b3d90bac376700cb56d606feb40f251a475a5d6634613564460b22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.60.2", ] [[package]] @@ -4331,10 +4341,10 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9bec5a31f3f9362f2258fd0e9c9dd61a9ca432e7306cc78c444258f0dce9a9c" dependencies = [ - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "raw-window-handle", "windows-sys 0.59.0", "windows-version", @@ -4342,14 +4352,14 @@ dependencies = [ [[package]] name = "windows" -version = "0.61.1" +version = "0.61.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5ee8f3d025738cb02bad7868bbb5f8a6327501e870bf51f1b455b0a2454a419" +checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" dependencies = [ "windows-collections", - "windows-core 0.61.0", + "windows-core", "windows-future", - "windows-link", + "windows-link 0.1.3", "windows-numerics", ] @@ -4359,39 +4369,31 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" dependencies = [ - "windows-core 0.61.0", + "windows-core", ] [[package]] name = "windows-core" -version = "0.52.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" -dependencies = [ - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-core" -version = "0.61.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4763c1de310c86d75a878046489e2e5ba02c649d185f21c67d4cf8a56d098980" +checksum = 
"c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" dependencies = [ "windows-implement", "windows-interface", - "windows-link", - "windows-result 0.3.2", - "windows-strings 0.4.0", + "windows-link 0.1.3", + "windows-result", + "windows-strings", ] [[package]] name = "windows-future" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a1d6bbefcb7b60acd19828e1bc965da6fcf18a7e39490c5f8be71e54a19ba32" +checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" dependencies = [ - "windows-core 0.61.0", - "windows-link", + "windows-core", + "windows-link 0.1.3", + "windows-threading", ] [[package]] @@ -4402,7 +4404,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] @@ -4413,107 +4415,92 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] -name = "windows-numerics" -version = "0.2.0" +name = "windows-link" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" -dependencies = [ - "windows-core 0.61.0", - "windows-link", -] +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] -name = "windows-registry" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" -dependencies = [ - "windows-result 0.2.0", - 
"windows-strings 0.1.0", - "windows-targets 0.52.6", -] - -[[package]] -name = "windows-result" +name = "windows-numerics" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1043d8214f791817bab27572aaa8af63732e11bf84aa21a45a78d6c317ae0e" +checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" dependencies = [ - "windows-targets 0.52.6", + "windows-core", + "windows-link 0.1.3", ] [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ - "windows-link", + "windows-link 0.1.3", ] [[package]] name = "windows-strings" -version = "0.1.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd9b125c486025df0eabcb585e62173c6c9eddcec5d117d3b6e8c30e2ee4d10" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ - "windows-result 0.2.0", - "windows-targets 0.52.6", + "windows-link 0.1.3", ] [[package]] -name = "windows-strings" -version = "0.4.0" +name = "windows-sys" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ - "windows-link", + "windows-targets 0.42.2", ] [[package]] name = "windows-sys" -version = "0.45.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.42.2", + "windows-targets 0.52.6", ] [[package]] name = "windows-sys" 
-version = "0.48.0" +version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.53.3", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-targets 0.52.6", + "windows-link 0.2.1", ] [[package]] @@ -4531,21 +4518,6 @@ dependencies = [ "windows_x86_64_msvc 0.42.2", ] -[[package]] -name = "windows-targets" -version = "0.48.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" -dependencies = [ - "windows_aarch64_gnullvm 0.48.5", - "windows_aarch64_msvc 0.48.5", - "windows_i686_gnu 0.48.5", - "windows_i686_msvc 0.48.5", - "windows_x86_64_gnu 0.48.5", - "windows_x86_64_gnullvm 0.48.5", - "windows_x86_64_msvc 0.48.5", -] - [[package]] name = "windows-targets" version = "0.52.6" @@ -4555,20 +4527,46 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm", + "windows_i686_gnullvm 0.52.6", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] +[[package]] 
+name = "windows-targets" +version = "0.53.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" +dependencies = [ + "windows-link 0.1.3", + "windows_aarch64_gnullvm 0.53.0", + "windows_aarch64_msvc 0.53.0", + "windows_i686_gnu 0.53.0", + "windows_i686_gnullvm 0.53.0", + "windows_i686_msvc 0.53.0", + "windows_x86_64_gnu 0.53.0", + "windows_x86_64_gnullvm 0.53.0", + "windows_x86_64_msvc 0.53.0", +] + +[[package]] +name = "windows-threading" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" +dependencies = [ + "windows-link 0.1.3", +] + [[package]] name = "windows-version" -version = "0.1.1" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6998aa457c9ba8ff2fb9f13e9d2a930dabcea28f1d0ab94d687d8b3654844515" +checksum = "e04a5c6627e310a23ad2358483286c7df260c964eb2d003d8efd6d0f4e79265c" dependencies = [ - "windows-targets 0.52.6", + "windows-link 0.1.3", ] [[package]] @@ -4579,15 +4577,15 @@ checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" +checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" [[package]] name = "windows_aarch64_msvc" @@ -4597,15 +4595,15 @@ checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" 
[[package]] name = "windows_aarch64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" +checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" [[package]] name = "windows_i686_gnu" @@ -4615,15 +4613,15 @@ checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" +checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" [[package]] name = "windows_i686_gnullvm" @@ -4631,6 +4629,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" +[[package]] +name = "windows_i686_gnullvm" +version = "0.53.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -4639,15 +4643,15 @@ checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" -version = 
"0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" +checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" [[package]] name = "windows_x86_64_gnu" @@ -4657,15 +4661,15 @@ checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" +checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" [[package]] name = "windows_x86_64_gnullvm" @@ -4675,15 +4679,15 @@ checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" +checksum = 
"0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" [[package]] name = "windows_x86_64_msvc" @@ -4693,15 +4697,15 @@ checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" -version = "0.48.5" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.52.6" +version = "0.53.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" +checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" @@ -4714,21 +4718,18 @@ dependencies = [ [[package]] name = "winnow" -version = "0.7.0" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e49d2d35d3fad69b39b94139037ecfb4f359f08958b9c11e7315ce770462419" -dependencies = [ - "memchr", -] +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" [[package]] name = "winreg" -version = "0.52.0" +version = "0.55.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" +checksum = "cb5a765337c50e9ec252c2069be9bf91c7df47afb103b642ba3a53bf8101be97" dependencies = [ "cfg-if", - "windows-sys 0.48.0", + "windows-sys 0.59.0", ] [[package]] @@ -4737,46 +4738,41 @@ version = "0.39.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" dependencies = [ - "bitflags 2.6.0", + "bitflags 2.9.3", ] -[[package]] -name = "write16" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" - [[package]] name = "writeable" -version = "0.5.5" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" +checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" [[package]] name = "wry" -version = "0.52.1" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12a714d9ba7075aae04a6e50229d6109e3d584774b99a6a8c60de1698ca111b9" +checksum = "728b7d4c8ec8d81cab295e0b5b8a4c263c0d41a785fb8f8c4df284e5411140a2" dependencies = [ "base64 0.22.1", - "block2 0.6.0", + "block2 0.6.1", "cookie", "crossbeam-channel", + "dirs", "dpi", "dunce", "gdkx11", "gtk", "html5ever", - "http 1.2.0", + "http", "javascriptcore-rs", "jni", "kuchikiki", "libc", "ndk", - "objc2 0.6.0", + "objc2 0.6.2", "objc2-app-kit", "objc2-core-foundation", - "objc2-foundation 0.3.0", + "objc2-foundation 0.3.1", "objc2-ui-kit", "objc2-web-kit", "once_cell", @@ -4785,13 +4781,13 @@ dependencies = [ "sha2", "soup3", "tao-macros", - "thiserror 2.0.7", + "thiserror 2.0.16", "url", "webkit2gtk", "webkit2gtk-sys", "webview2-com", "windows", - "windows-core 0.61.0", + "windows-core", "windows-version", "x11-dl", ] @@ -4819,9 +4815,9 @@ dependencies = [ [[package]] name = "yoke" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "120e6aef9aa629e3d4f52dc8cc43a015c7724194c97dfaf45180d2daf2b77f40" +checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" dependencies = [ "serde", "stable_deref_trait", @@ -4831,55 +4827,54 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.5" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2380878cad4ac9aac1e2435f3eb4020e8374b5f13c296cb75b4620ff8e229154" +checksum = 
"38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "synstructure", ] [[package]] name = "zerocopy" -version = "0.7.35" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" +checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" dependencies = [ - "byteorder", "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.35" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" +checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", "synstructure", ] @@ -4889,11 +4884,22 @@ version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +[[package]] +name = "zerotrie" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +dependencies = [ 
+ "displaydoc", + "yoke", + "zerofrom", +] + [[package]] name = "zerovec" -version = "0.10.4" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" +checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b" dependencies = [ "yoke", "zerofrom", @@ -4902,11 +4908,11 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.10.3" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" +checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.90", + "syn 2.0.106", ] diff --git a/web/src-tauri/Cargo.toml b/web/src-tauri/Cargo.toml index 4067c367..e7ebf7ff 100644 --- a/web/src-tauri/Cargo.toml +++ b/web/src-tauri/Cargo.toml @@ -7,23 +7,23 @@ license = "" repository = "" default-run = "app" edition = "2021" -rust-version = "1.86" +rust-version = "1.89" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [build-dependencies] -tauri-build = { version = "2.3.0", features = [] } +tauri-build = { version = "2.5.1", features = [] } [dependencies] -serde_json = "1.0.140" -serde = { version = "1.0.219", features = ["derive"] } -tauri = { version = "2.6.2", features = ["tray-icon"] } +serde_json = "1.0.145" +serde = { version = "1.0.228", features = ["derive"] } +tauri = { version = "2.9.2", features = ["tray-icon"] } directories = "6.0.0" dirs = "6.0.0" # reqwest = { version = "0.12.5", features = ["blocking", "json"] } -tokio = { version = "1.46.1", features = ["full"] } -warp = "0.3.7" -ureq = "=3.0.12" +tokio = { version = "1.48.0", features = ["full"] } +warp = { version = "0.4.2", features = ["server"] } +ureq = "=3.1.2" [features] diff --git a/web/src-tauri/gen/schemas/acl-manifests.json 
b/web/src-tauri/gen/schemas/acl-manifests.json index 92b16267..43da9ef6 100644 --- a/web/src-tauri/gen/schemas/acl-manifests.json +++ b/web/src-tauri/gen/schemas/acl-manifests.json @@ -1 +1 @@ -{"core":{"default_permission":{"identifier":"default","description":"Default core plugins set.","permissions":["core:path:default","core:event:default","core:window:default","core:webview:default","core:app:default","core:image:default","core:resources:default","core:menu:default","core:tray:default"]},"permissions":{},"permission_sets":{},"global_scope_schema":null},"core:app":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-version","allow-name","allow-tauri-version","allow-identifier"]},"permissions":{"allow-app-hide":{"identifier":"allow-app-hide","description":"Enables the app_hide command without any pre-configured scope.","commands":{"allow":["app_hide"],"deny":[]}},"allow-app-show":{"identifier":"allow-app-show","description":"Enables the app_show command without any pre-configured scope.","commands":{"allow":["app_show"],"deny":[]}},"allow-default-window-icon":{"identifier":"allow-default-window-icon","description":"Enables the default_window_icon command without any pre-configured scope.","commands":{"allow":["default_window_icon"],"deny":[]}},"allow-fetch-data-store-identifiers":{"identifier":"allow-fetch-data-store-identifiers","description":"Enables the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":["fetch_data_store_identifiers"],"deny":[]}},"allow-identifier":{"identifier":"allow-identifier","description":"Enables the identifier command without any pre-configured scope.","commands":{"allow":["identifier"],"deny":[]}},"allow-name":{"identifier":"allow-name","description":"Enables the name command without any pre-configured 
scope.","commands":{"allow":["name"],"deny":[]}},"allow-remove-data-store":{"identifier":"allow-remove-data-store","description":"Enables the remove_data_store command without any pre-configured scope.","commands":{"allow":["remove_data_store"],"deny":[]}},"allow-set-app-theme":{"identifier":"allow-set-app-theme","description":"Enables the set_app_theme command without any pre-configured scope.","commands":{"allow":["set_app_theme"],"deny":[]}},"allow-set-dock-visibility":{"identifier":"allow-set-dock-visibility","description":"Enables the set_dock_visibility command without any pre-configured scope.","commands":{"allow":["set_dock_visibility"],"deny":[]}},"allow-tauri-version":{"identifier":"allow-tauri-version","description":"Enables the tauri_version command without any pre-configured scope.","commands":{"allow":["tauri_version"],"deny":[]}},"allow-version":{"identifier":"allow-version","description":"Enables the version command without any pre-configured scope.","commands":{"allow":["version"],"deny":[]}},"deny-app-hide":{"identifier":"deny-app-hide","description":"Denies the app_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["app_hide"]}},"deny-app-show":{"identifier":"deny-app-show","description":"Denies the app_show command without any pre-configured scope.","commands":{"allow":[],"deny":["app_show"]}},"deny-default-window-icon":{"identifier":"deny-default-window-icon","description":"Denies the default_window_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["default_window_icon"]}},"deny-fetch-data-store-identifiers":{"identifier":"deny-fetch-data-store-identifiers","description":"Denies the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":[],"deny":["fetch_data_store_identifiers"]}},"deny-identifier":{"identifier":"deny-identifier","description":"Denies the identifier command without any pre-configured 
scope.","commands":{"allow":[],"deny":["identifier"]}},"deny-name":{"identifier":"deny-name","description":"Denies the name command without any pre-configured scope.","commands":{"allow":[],"deny":["name"]}},"deny-remove-data-store":{"identifier":"deny-remove-data-store","description":"Denies the remove_data_store command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_data_store"]}},"deny-set-app-theme":{"identifier":"deny-set-app-theme","description":"Denies the set_app_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_app_theme"]}},"deny-set-dock-visibility":{"identifier":"deny-set-dock-visibility","description":"Denies the set_dock_visibility command without any pre-configured scope.","commands":{"allow":[],"deny":["set_dock_visibility"]}},"deny-tauri-version":{"identifier":"deny-tauri-version","description":"Denies the tauri_version command without any pre-configured scope.","commands":{"allow":[],"deny":["tauri_version"]}},"deny-version":{"identifier":"deny-version","description":"Denies the version command without any pre-configured scope.","commands":{"allow":[],"deny":["version"]}}},"permission_sets":{},"global_scope_schema":null},"core:event":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-listen","allow-unlisten","allow-emit","allow-emit-to"]},"permissions":{"allow-emit":{"identifier":"allow-emit","description":"Enables the emit command without any pre-configured scope.","commands":{"allow":["emit"],"deny":[]}},"allow-emit-to":{"identifier":"allow-emit-to","description":"Enables the emit_to command without any pre-configured scope.","commands":{"allow":["emit_to"],"deny":[]}},"allow-listen":{"identifier":"allow-listen","description":"Enables the listen command without any pre-configured 
scope.","commands":{"allow":["listen"],"deny":[]}},"allow-unlisten":{"identifier":"allow-unlisten","description":"Enables the unlisten command without any pre-configured scope.","commands":{"allow":["unlisten"],"deny":[]}},"deny-emit":{"identifier":"deny-emit","description":"Denies the emit command without any pre-configured scope.","commands":{"allow":[],"deny":["emit"]}},"deny-emit-to":{"identifier":"deny-emit-to","description":"Denies the emit_to command without any pre-configured scope.","commands":{"allow":[],"deny":["emit_to"]}},"deny-listen":{"identifier":"deny-listen","description":"Denies the listen command without any pre-configured scope.","commands":{"allow":[],"deny":["listen"]}},"deny-unlisten":{"identifier":"deny-unlisten","description":"Denies the unlisten command without any pre-configured scope.","commands":{"allow":[],"deny":["unlisten"]}}},"permission_sets":{},"global_scope_schema":null},"core:image":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-from-bytes","allow-from-path","allow-rgba","allow-size"]},"permissions":{"allow-from-bytes":{"identifier":"allow-from-bytes","description":"Enables the from_bytes command without any pre-configured scope.","commands":{"allow":["from_bytes"],"deny":[]}},"allow-from-path":{"identifier":"allow-from-path","description":"Enables the from_path command without any pre-configured scope.","commands":{"allow":["from_path"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-rgba":{"identifier":"allow-rgba","description":"Enables the rgba command without any pre-configured scope.","commands":{"allow":["rgba"],"deny":[]}},"allow-size":{"identifier":"allow-size","description":"Enables the size command without any pre-configured 
scope.","commands":{"allow":["size"],"deny":[]}},"deny-from-bytes":{"identifier":"deny-from-bytes","description":"Denies the from_bytes command without any pre-configured scope.","commands":{"allow":[],"deny":["from_bytes"]}},"deny-from-path":{"identifier":"deny-from-path","description":"Denies the from_path command without any pre-configured scope.","commands":{"allow":[],"deny":["from_path"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-rgba":{"identifier":"deny-rgba","description":"Denies the rgba command without any pre-configured scope.","commands":{"allow":[],"deny":["rgba"]}},"deny-size":{"identifier":"deny-size","description":"Denies the size command without any pre-configured scope.","commands":{"allow":[],"deny":["size"]}}},"permission_sets":{},"global_scope_schema":null},"core:menu":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-append","allow-prepend","allow-insert","allow-remove","allow-remove-at","allow-items","allow-get","allow-popup","allow-create-default","allow-set-as-app-menu","allow-set-as-window-menu","allow-text","allow-set-text","allow-is-enabled","allow-set-enabled","allow-set-accelerator","allow-set-as-windows-menu-for-nsapp","allow-set-as-help-menu-for-nsapp","allow-is-checked","allow-set-checked","allow-set-icon"]},"permissions":{"allow-append":{"identifier":"allow-append","description":"Enables the append command without any pre-configured scope.","commands":{"allow":["append"],"deny":[]}},"allow-create-default":{"identifier":"allow-create-default","description":"Enables the create_default command without any pre-configured scope.","commands":{"allow":["create_default"],"deny":[]}},"allow-get":{"identifier":"allow-get","description":"Enables the get command without any pre-configured 
scope.","commands":{"allow":["get"],"deny":[]}},"allow-insert":{"identifier":"allow-insert","description":"Enables the insert command without any pre-configured scope.","commands":{"allow":["insert"],"deny":[]}},"allow-is-checked":{"identifier":"allow-is-checked","description":"Enables the is_checked command without any pre-configured scope.","commands":{"allow":["is_checked"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-items":{"identifier":"allow-items","description":"Enables the items command without any pre-configured scope.","commands":{"allow":["items"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-popup":{"identifier":"allow-popup","description":"Enables the popup command without any pre-configured scope.","commands":{"allow":["popup"],"deny":[]}},"allow-prepend":{"identifier":"allow-prepend","description":"Enables the prepend command without any pre-configured scope.","commands":{"allow":["prepend"],"deny":[]}},"allow-remove":{"identifier":"allow-remove","description":"Enables the remove command without any pre-configured scope.","commands":{"allow":["remove"],"deny":[]}},"allow-remove-at":{"identifier":"allow-remove-at","description":"Enables the remove_at command without any pre-configured scope.","commands":{"allow":["remove_at"],"deny":[]}},"allow-set-accelerator":{"identifier":"allow-set-accelerator","description":"Enables the set_accelerator command without any pre-configured scope.","commands":{"allow":["set_accelerator"],"deny":[]}},"allow-set-as-app-menu":{"identifier":"allow-set-as-app-menu","description":"Enables the set_as_app_menu command without any pre-configured 
scope.","commands":{"allow":["set_as_app_menu"],"deny":[]}},"allow-set-as-help-menu-for-nsapp":{"identifier":"allow-set-as-help-menu-for-nsapp","description":"Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_help_menu_for_nsapp"],"deny":[]}},"allow-set-as-window-menu":{"identifier":"allow-set-as-window-menu","description":"Enables the set_as_window_menu command without any pre-configured scope.","commands":{"allow":["set_as_window_menu"],"deny":[]}},"allow-set-as-windows-menu-for-nsapp":{"identifier":"allow-set-as-windows-menu-for-nsapp","description":"Enables the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_windows_menu_for_nsapp"],"deny":[]}},"allow-set-checked":{"identifier":"allow-set-checked","description":"Enables the set_checked command without any pre-configured scope.","commands":{"allow":["set_checked"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-text":{"identifier":"allow-set-text","description":"Enables the set_text command without any pre-configured scope.","commands":{"allow":["set_text"],"deny":[]}},"allow-text":{"identifier":"allow-text","description":"Enables the text command without any pre-configured scope.","commands":{"allow":["text"],"deny":[]}},"deny-append":{"identifier":"deny-append","description":"Denies the append command without any pre-configured scope.","commands":{"allow":[],"deny":["append"]}},"deny-create-default":{"identifier":"deny-create-default","description":"Denies the create_default command without any pre-configured 
scope.","commands":{"allow":[],"deny":["create_default"]}},"deny-get":{"identifier":"deny-get","description":"Denies the get command without any pre-configured scope.","commands":{"allow":[],"deny":["get"]}},"deny-insert":{"identifier":"deny-insert","description":"Denies the insert command without any pre-configured scope.","commands":{"allow":[],"deny":["insert"]}},"deny-is-checked":{"identifier":"deny-is-checked","description":"Denies the is_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["is_checked"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-items":{"identifier":"deny-items","description":"Denies the items command without any pre-configured scope.","commands":{"allow":[],"deny":["items"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-popup":{"identifier":"deny-popup","description":"Denies the popup command without any pre-configured scope.","commands":{"allow":[],"deny":["popup"]}},"deny-prepend":{"identifier":"deny-prepend","description":"Denies the prepend command without any pre-configured scope.","commands":{"allow":[],"deny":["prepend"]}},"deny-remove":{"identifier":"deny-remove","description":"Denies the remove command without any pre-configured scope.","commands":{"allow":[],"deny":["remove"]}},"deny-remove-at":{"identifier":"deny-remove-at","description":"Denies the remove_at command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_at"]}},"deny-set-accelerator":{"identifier":"deny-set-accelerator","description":"Denies the set_accelerator command without any pre-configured scope.","commands":{"allow":[],"deny":["set_accelerator"]}},"deny-set-as-app-menu":{"identifier":"deny-set-as-app-menu","description":"Denies the set_as_app_menu command without 
any pre-configured scope.","commands":{"allow":[],"deny":["set_as_app_menu"]}},"deny-set-as-help-menu-for-nsapp":{"identifier":"deny-set-as-help-menu-for-nsapp","description":"Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_help_menu_for_nsapp"]}},"deny-set-as-window-menu":{"identifier":"deny-set-as-window-menu","description":"Denies the set_as_window_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_window_menu"]}},"deny-set-as-windows-menu-for-nsapp":{"identifier":"deny-set-as-windows-menu-for-nsapp","description":"Denies the set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_windows_menu_for_nsapp"]}},"deny-set-checked":{"identifier":"deny-set-checked","description":"Denies the set_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["set_checked"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-text":{"identifier":"deny-set-text","description":"Denies the set_text command without any pre-configured scope.","commands":{"allow":[],"deny":["set_text"]}},"deny-text":{"identifier":"deny-text","description":"Denies the text command without any pre-configured scope.","commands":{"allow":[],"deny":["text"]}}},"permission_sets":{},"global_scope_schema":null},"core:path":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-resolve-directory","allow-resolve","allow-normalize","allow-join","allow-dirname","allow-extname","allow-basename","allow-is-absolute"]},"permissions":{"allow-basename":{"identifier":"allow-basename","description":"Enables the basename command without any pre-configured scope.","commands":{"allow":["basename"],"deny":[]}},"allow-dirname":{"identifier":"allow-dirname","description":"Enables the dirname command without any pre-configured scope.","commands":{"allow":["dirname"],"deny":[]}},"allow-extname":{"identifier":"allow-extname","description":"Enables the extname command without any pre-configured scope.","commands":{"allow":["extname"],"deny":[]}},"allow-is-absolute":{"identifier":"allow-is-absolute","description":"Enables the is_absolute command without any pre-configured scope.","commands":{"allow":["is_absolute"],"deny":[]}},"allow-join":{"identifier":"allow-join","description":"Enables the join command without any pre-configured scope.","commands":{"allow":["join"],"deny":[]}},"allow-normalize":{"identifier":"allow-normalize","description":"Enables the normalize command without any pre-configured scope.","commands":{"allow":["normalize"],"deny":[]}},"allow-resolve":{"identifier":"allow-resolve","description":"Enables the resolve command without any pre-configured scope.","commands":{"allow":["resolve"],"deny":[]}},"allow-resolve-directory":{"identifier":"allow-resolve-directory","description":"Enables the resolve_directory command without any pre-configured scope.","commands":{"allow":["resolve_directory"],"deny":[]}},"deny-basename":{"identifier":"deny-basename","description":"Denies the basename command without any pre-configured scope.","commands":{"allow":[],"deny":["basename"]}},"deny-dirname":{"identifier":"deny-dirname","description":"Denies the dirname command without any pre-configured scope.","commands":{"allow":[],"deny":["dirname"]}},"deny-extname":{"identifier":"deny-extname","description":"Denies the extname 
command without any pre-configured scope.","commands":{"allow":[],"deny":["extname"]}},"deny-is-absolute":{"identifier":"deny-is-absolute","description":"Denies the is_absolute command without any pre-configured scope.","commands":{"allow":[],"deny":["is_absolute"]}},"deny-join":{"identifier":"deny-join","description":"Denies the join command without any pre-configured scope.","commands":{"allow":[],"deny":["join"]}},"deny-normalize":{"identifier":"deny-normalize","description":"Denies the normalize command without any pre-configured scope.","commands":{"allow":[],"deny":["normalize"]}},"deny-resolve":{"identifier":"deny-resolve","description":"Denies the resolve command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve"]}},"deny-resolve-directory":{"identifier":"deny-resolve-directory","description":"Denies the resolve_directory command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve_directory"]}}},"permission_sets":{},"global_scope_schema":null},"core:resources":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-close"]},"permissions":{"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}}},"permission_sets":{},"global_scope_schema":null},"core:tray":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all 
commands.","permissions":["allow-new","allow-get-by-id","allow-remove-by-id","allow-set-icon","allow-set-menu","allow-set-tooltip","allow-set-title","allow-set-visible","allow-set-temp-dir-path","allow-set-icon-as-template","allow-set-show-menu-on-left-click"]},"permissions":{"allow-get-by-id":{"identifier":"allow-get-by-id","description":"Enables the get_by_id command without any pre-configured scope.","commands":{"allow":["get_by_id"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-remove-by-id":{"identifier":"allow-remove-by-id","description":"Enables the remove_by_id command without any pre-configured scope.","commands":{"allow":["remove_by_id"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-icon-as-template":{"identifier":"allow-set-icon-as-template","description":"Enables the set_icon_as_template command without any pre-configured scope.","commands":{"allow":["set_icon_as_template"],"deny":[]}},"allow-set-menu":{"identifier":"allow-set-menu","description":"Enables the set_menu command without any pre-configured scope.","commands":{"allow":["set_menu"],"deny":[]}},"allow-set-show-menu-on-left-click":{"identifier":"allow-set-show-menu-on-left-click","description":"Enables the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":["set_show_menu_on_left_click"],"deny":[]}},"allow-set-temp-dir-path":{"identifier":"allow-set-temp-dir-path","description":"Enables the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":["set_temp_dir_path"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured 
scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-tooltip":{"identifier":"allow-set-tooltip","description":"Enables the set_tooltip command without any pre-configured scope.","commands":{"allow":["set_tooltip"],"deny":[]}},"allow-set-visible":{"identifier":"allow-set-visible","description":"Enables the set_visible command without any pre-configured scope.","commands":{"allow":["set_visible"],"deny":[]}},"deny-get-by-id":{"identifier":"deny-get-by-id","description":"Denies the get_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["get_by_id"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-remove-by-id":{"identifier":"deny-remove-by-id","description":"Denies the remove_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_by_id"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-icon-as-template":{"identifier":"deny-set-icon-as-template","description":"Denies the set_icon_as_template command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon_as_template"]}},"deny-set-menu":{"identifier":"deny-set-menu","description":"Denies the set_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_menu"]}},"deny-set-show-menu-on-left-click":{"identifier":"deny-set-show-menu-on-left-click","description":"Denies the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":[],"deny":["set_show_menu_on_left_click"]}},"deny-set-temp-dir-path":{"identifier":"deny-set-temp-dir-path","description":"Denies the set_temp_dir_path command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_temp_dir_path"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-tooltip":{"identifier":"deny-set-tooltip","description":"Denies the set_tooltip command without any pre-configured scope.","commands":{"allow":[],"deny":["set_tooltip"]}},"deny-set-visible":{"identifier":"deny-set-visible","description":"Denies the set_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible"]}}},"permission_sets":{},"global_scope_schema":null},"core:webview":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-get-all-webviews","allow-webview-position","allow-webview-size","allow-internal-toggle-devtools"]},"permissions":{"allow-clear-all-browsing-data":{"identifier":"allow-clear-all-browsing-data","description":"Enables the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":["clear_all_browsing_data"],"deny":[]}},"allow-create-webview":{"identifier":"allow-create-webview","description":"Enables the create_webview command without any pre-configured scope.","commands":{"allow":["create_webview"],"deny":[]}},"allow-create-webview-window":{"identifier":"allow-create-webview-window","description":"Enables the create_webview_window command without any pre-configured scope.","commands":{"allow":["create_webview_window"],"deny":[]}},"allow-get-all-webviews":{"identifier":"allow-get-all-webviews","description":"Enables the get_all_webviews command without any pre-configured scope.","commands":{"allow":["get_all_webviews"],"deny":[]}},"allow-internal-toggle-devtools":{"identifier":"allow-internal-toggle-devtools","description":"Enables the internal_toggle_devtools command without any pre-configured 
scope.","commands":{"allow":["internal_toggle_devtools"],"deny":[]}},"allow-print":{"identifier":"allow-print","description":"Enables the print command without any pre-configured scope.","commands":{"allow":["print"],"deny":[]}},"allow-reparent":{"identifier":"allow-reparent","description":"Enables the reparent command without any pre-configured scope.","commands":{"allow":["reparent"],"deny":[]}},"allow-set-webview-auto-resize":{"identifier":"allow-set-webview-auto-resize","description":"Enables the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":["set_webview_auto_resize"],"deny":[]}},"allow-set-webview-background-color":{"identifier":"allow-set-webview-background-color","description":"Enables the set_webview_background_color command without any pre-configured scope.","commands":{"allow":["set_webview_background_color"],"deny":[]}},"allow-set-webview-focus":{"identifier":"allow-set-webview-focus","description":"Enables the set_webview_focus command without any pre-configured scope.","commands":{"allow":["set_webview_focus"],"deny":[]}},"allow-set-webview-position":{"identifier":"allow-set-webview-position","description":"Enables the set_webview_position command without any pre-configured scope.","commands":{"allow":["set_webview_position"],"deny":[]}},"allow-set-webview-size":{"identifier":"allow-set-webview-size","description":"Enables the set_webview_size command without any pre-configured scope.","commands":{"allow":["set_webview_size"],"deny":[]}},"allow-set-webview-zoom":{"identifier":"allow-set-webview-zoom","description":"Enables the set_webview_zoom command without any pre-configured scope.","commands":{"allow":["set_webview_zoom"],"deny":[]}},"allow-webview-close":{"identifier":"allow-webview-close","description":"Enables the webview_close command without any pre-configured scope.","commands":{"allow":["webview_close"],"deny":[]}},"allow-webview-hide":{"identifier":"allow-webview-hide","description":"Enables the 
webview_hide command without any pre-configured scope.","commands":{"allow":["webview_hide"],"deny":[]}},"allow-webview-position":{"identifier":"allow-webview-position","description":"Enables the webview_position command without any pre-configured scope.","commands":{"allow":["webview_position"],"deny":[]}},"allow-webview-show":{"identifier":"allow-webview-show","description":"Enables the webview_show command without any pre-configured scope.","commands":{"allow":["webview_show"],"deny":[]}},"allow-webview-size":{"identifier":"allow-webview-size","description":"Enables the webview_size command without any pre-configured scope.","commands":{"allow":["webview_size"],"deny":[]}},"deny-clear-all-browsing-data":{"identifier":"deny-clear-all-browsing-data","description":"Denies the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":[],"deny":["clear_all_browsing_data"]}},"deny-create-webview":{"identifier":"deny-create-webview","description":"Denies the create_webview command without any pre-configured scope.","commands":{"allow":[],"deny":["create_webview"]}},"deny-create-webview-window":{"identifier":"deny-create-webview-window","description":"Denies the create_webview_window command without any pre-configured scope.","commands":{"allow":[],"deny":["create_webview_window"]}},"deny-get-all-webviews":{"identifier":"deny-get-all-webviews","description":"Denies the get_all_webviews command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_webviews"]}},"deny-internal-toggle-devtools":{"identifier":"deny-internal-toggle-devtools","description":"Denies the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_devtools"]}},"deny-print":{"identifier":"deny-print","description":"Denies the print command without any pre-configured scope.","commands":{"allow":[],"deny":["print"]}},"deny-reparent":{"identifier":"deny-reparent","description":"Denies the reparent 
command without any pre-configured scope.","commands":{"allow":[],"deny":["reparent"]}},"deny-set-webview-auto-resize":{"identifier":"deny-set-webview-auto-resize","description":"Denies the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_auto_resize"]}},"deny-set-webview-background-color":{"identifier":"deny-set-webview-background-color","description":"Denies the set_webview_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_background_color"]}},"deny-set-webview-focus":{"identifier":"deny-set-webview-focus","description":"Denies the set_webview_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_focus"]}},"deny-set-webview-position":{"identifier":"deny-set-webview-position","description":"Denies the set_webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_position"]}},"deny-set-webview-size":{"identifier":"deny-set-webview-size","description":"Denies the set_webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_size"]}},"deny-set-webview-zoom":{"identifier":"deny-set-webview-zoom","description":"Denies the set_webview_zoom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_zoom"]}},"deny-webview-close":{"identifier":"deny-webview-close","description":"Denies the webview_close command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_close"]}},"deny-webview-hide":{"identifier":"deny-webview-hide","description":"Denies the webview_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_hide"]}},"deny-webview-position":{"identifier":"deny-webview-position","description":"Denies the webview_position command without any pre-configured 
scope.","commands":{"allow":[],"deny":["webview_position"]}},"deny-webview-show":{"identifier":"deny-webview-show","description":"Denies the webview_show command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_show"]}},"deny-webview-size":{"identifier":"deny-webview-size","description":"Denies the webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_size"]}}},"permission_sets":{},"global_scope_schema":null},"core:window":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-get-all-windows","allow-scale-factor","allow-inner-position","allow-outer-position","allow-inner-size","allow-outer-size","allow-is-fullscreen","allow-is-minimized","allow-is-maximized","allow-is-focused","allow-is-decorated","allow-is-resizable","allow-is-maximizable","allow-is-minimizable","allow-is-closable","allow-is-visible","allow-is-enabled","allow-title","allow-current-monitor","allow-primary-monitor","allow-monitor-from-point","allow-available-monitors","allow-cursor-position","allow-theme","allow-is-always-on-top","allow-internal-toggle-maximize"]},"permissions":{"allow-available-monitors":{"identifier":"allow-available-monitors","description":"Enables the available_monitors command without any pre-configured scope.","commands":{"allow":["available_monitors"],"deny":[]}},"allow-center":{"identifier":"allow-center","description":"Enables the center command without any pre-configured scope.","commands":{"allow":["center"],"deny":[]}},"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"allow-create":{"identifier":"allow-create","description":"Enables the create command without any pre-configured scope.","commands":{"allow":["create"],"deny":[]}},"allow-current-monitor":{"identifier":"allow-current-monitor","description":"Enables the current_monitor 
command without any pre-configured scope.","commands":{"allow":["current_monitor"],"deny":[]}},"allow-cursor-position":{"identifier":"allow-cursor-position","description":"Enables the cursor_position command without any pre-configured scope.","commands":{"allow":["cursor_position"],"deny":[]}},"allow-destroy":{"identifier":"allow-destroy","description":"Enables the destroy command without any pre-configured scope.","commands":{"allow":["destroy"],"deny":[]}},"allow-get-all-windows":{"identifier":"allow-get-all-windows","description":"Enables the get_all_windows command without any pre-configured scope.","commands":{"allow":["get_all_windows"],"deny":[]}},"allow-hide":{"identifier":"allow-hide","description":"Enables the hide command without any pre-configured scope.","commands":{"allow":["hide"],"deny":[]}},"allow-inner-position":{"identifier":"allow-inner-position","description":"Enables the inner_position command without any pre-configured scope.","commands":{"allow":["inner_position"],"deny":[]}},"allow-inner-size":{"identifier":"allow-inner-size","description":"Enables the inner_size command without any pre-configured scope.","commands":{"allow":["inner_size"],"deny":[]}},"allow-internal-toggle-maximize":{"identifier":"allow-internal-toggle-maximize","description":"Enables the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":["internal_toggle_maximize"],"deny":[]}},"allow-is-always-on-top":{"identifier":"allow-is-always-on-top","description":"Enables the is_always_on_top command without any pre-configured scope.","commands":{"allow":["is_always_on_top"],"deny":[]}},"allow-is-closable":{"identifier":"allow-is-closable","description":"Enables the is_closable command without any pre-configured scope.","commands":{"allow":["is_closable"],"deny":[]}},"allow-is-decorated":{"identifier":"allow-is-decorated","description":"Enables the is_decorated command without any pre-configured 
scope.","commands":{"allow":["is_decorated"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-is-focused":{"identifier":"allow-is-focused","description":"Enables the is_focused command without any pre-configured scope.","commands":{"allow":["is_focused"],"deny":[]}},"allow-is-fullscreen":{"identifier":"allow-is-fullscreen","description":"Enables the is_fullscreen command without any pre-configured scope.","commands":{"allow":["is_fullscreen"],"deny":[]}},"allow-is-maximizable":{"identifier":"allow-is-maximizable","description":"Enables the is_maximizable command without any pre-configured scope.","commands":{"allow":["is_maximizable"],"deny":[]}},"allow-is-maximized":{"identifier":"allow-is-maximized","description":"Enables the is_maximized command without any pre-configured scope.","commands":{"allow":["is_maximized"],"deny":[]}},"allow-is-minimizable":{"identifier":"allow-is-minimizable","description":"Enables the is_minimizable command without any pre-configured scope.","commands":{"allow":["is_minimizable"],"deny":[]}},"allow-is-minimized":{"identifier":"allow-is-minimized","description":"Enables the is_minimized command without any pre-configured scope.","commands":{"allow":["is_minimized"],"deny":[]}},"allow-is-resizable":{"identifier":"allow-is-resizable","description":"Enables the is_resizable command without any pre-configured scope.","commands":{"allow":["is_resizable"],"deny":[]}},"allow-is-visible":{"identifier":"allow-is-visible","description":"Enables the is_visible command without any pre-configured scope.","commands":{"allow":["is_visible"],"deny":[]}},"allow-maximize":{"identifier":"allow-maximize","description":"Enables the maximize command without any pre-configured scope.","commands":{"allow":["maximize"],"deny":[]}},"allow-minimize":{"identifier":"allow-minimize","description":"Enables the 
minimize command without any pre-configured scope.","commands":{"allow":["minimize"],"deny":[]}},"allow-monitor-from-point":{"identifier":"allow-monitor-from-point","description":"Enables the monitor_from_point command without any pre-configured scope.","commands":{"allow":["monitor_from_point"],"deny":[]}},"allow-outer-position":{"identifier":"allow-outer-position","description":"Enables the outer_position command without any pre-configured scope.","commands":{"allow":["outer_position"],"deny":[]}},"allow-outer-size":{"identifier":"allow-outer-size","description":"Enables the outer_size command without any pre-configured scope.","commands":{"allow":["outer_size"],"deny":[]}},"allow-primary-monitor":{"identifier":"allow-primary-monitor","description":"Enables the primary_monitor command without any pre-configured scope.","commands":{"allow":["primary_monitor"],"deny":[]}},"allow-request-user-attention":{"identifier":"allow-request-user-attention","description":"Enables the request_user_attention command without any pre-configured scope.","commands":{"allow":["request_user_attention"],"deny":[]}},"allow-scale-factor":{"identifier":"allow-scale-factor","description":"Enables the scale_factor command without any pre-configured scope.","commands":{"allow":["scale_factor"],"deny":[]}},"allow-set-always-on-bottom":{"identifier":"allow-set-always-on-bottom","description":"Enables the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":["set_always_on_bottom"],"deny":[]}},"allow-set-always-on-top":{"identifier":"allow-set-always-on-top","description":"Enables the set_always_on_top command without any pre-configured scope.","commands":{"allow":["set_always_on_top"],"deny":[]}},"allow-set-background-color":{"identifier":"allow-set-background-color","description":"Enables the set_background_color command without any pre-configured 
scope.","commands":{"allow":["set_background_color"],"deny":[]}},"allow-set-badge-count":{"identifier":"allow-set-badge-count","description":"Enables the set_badge_count command without any pre-configured scope.","commands":{"allow":["set_badge_count"],"deny":[]}},"allow-set-badge-label":{"identifier":"allow-set-badge-label","description":"Enables the set_badge_label command without any pre-configured scope.","commands":{"allow":["set_badge_label"],"deny":[]}},"allow-set-closable":{"identifier":"allow-set-closable","description":"Enables the set_closable command without any pre-configured scope.","commands":{"allow":["set_closable"],"deny":[]}},"allow-set-content-protected":{"identifier":"allow-set-content-protected","description":"Enables the set_content_protected command without any pre-configured scope.","commands":{"allow":["set_content_protected"],"deny":[]}},"allow-set-cursor-grab":{"identifier":"allow-set-cursor-grab","description":"Enables the set_cursor_grab command without any pre-configured scope.","commands":{"allow":["set_cursor_grab"],"deny":[]}},"allow-set-cursor-icon":{"identifier":"allow-set-cursor-icon","description":"Enables the set_cursor_icon command without any pre-configured scope.","commands":{"allow":["set_cursor_icon"],"deny":[]}},"allow-set-cursor-position":{"identifier":"allow-set-cursor-position","description":"Enables the set_cursor_position command without any pre-configured scope.","commands":{"allow":["set_cursor_position"],"deny":[]}},"allow-set-cursor-visible":{"identifier":"allow-set-cursor-visible","description":"Enables the set_cursor_visible command without any pre-configured scope.","commands":{"allow":["set_cursor_visible"],"deny":[]}},"allow-set-decorations":{"identifier":"allow-set-decorations","description":"Enables the set_decorations command without any pre-configured scope.","commands":{"allow":["set_decorations"],"deny":[]}},"allow-set-effects":{"identifier":"allow-set-effects","description":"Enables the set_effects 
command without any pre-configured scope.","commands":{"allow":["set_effects"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-focus":{"identifier":"allow-set-focus","description":"Enables the set_focus command without any pre-configured scope.","commands":{"allow":["set_focus"],"deny":[]}},"allow-set-fullscreen":{"identifier":"allow-set-fullscreen","description":"Enables the set_fullscreen command without any pre-configured scope.","commands":{"allow":["set_fullscreen"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-ignore-cursor-events":{"identifier":"allow-set-ignore-cursor-events","description":"Enables the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":["set_ignore_cursor_events"],"deny":[]}},"allow-set-max-size":{"identifier":"allow-set-max-size","description":"Enables the set_max_size command without any pre-configured scope.","commands":{"allow":["set_max_size"],"deny":[]}},"allow-set-maximizable":{"identifier":"allow-set-maximizable","description":"Enables the set_maximizable command without any pre-configured scope.","commands":{"allow":["set_maximizable"],"deny":[]}},"allow-set-min-size":{"identifier":"allow-set-min-size","description":"Enables the set_min_size command without any pre-configured scope.","commands":{"allow":["set_min_size"],"deny":[]}},"allow-set-minimizable":{"identifier":"allow-set-minimizable","description":"Enables the set_minimizable command without any pre-configured scope.","commands":{"allow":["set_minimizable"],"deny":[]}},"allow-set-overlay-icon":{"identifier":"allow-set-overlay-icon","description":"Enables the set_overlay_icon command without any pre-configured 
scope.","commands":{"allow":["set_overlay_icon"],"deny":[]}},"allow-set-position":{"identifier":"allow-set-position","description":"Enables the set_position command without any pre-configured scope.","commands":{"allow":["set_position"],"deny":[]}},"allow-set-progress-bar":{"identifier":"allow-set-progress-bar","description":"Enables the set_progress_bar command without any pre-configured scope.","commands":{"allow":["set_progress_bar"],"deny":[]}},"allow-set-resizable":{"identifier":"allow-set-resizable","description":"Enables the set_resizable command without any pre-configured scope.","commands":{"allow":["set_resizable"],"deny":[]}},"allow-set-shadow":{"identifier":"allow-set-shadow","description":"Enables the set_shadow command without any pre-configured scope.","commands":{"allow":["set_shadow"],"deny":[]}},"allow-set-size":{"identifier":"allow-set-size","description":"Enables the set_size command without any pre-configured scope.","commands":{"allow":["set_size"],"deny":[]}},"allow-set-size-constraints":{"identifier":"allow-set-size-constraints","description":"Enables the set_size_constraints command without any pre-configured scope.","commands":{"allow":["set_size_constraints"],"deny":[]}},"allow-set-skip-taskbar":{"identifier":"allow-set-skip-taskbar","description":"Enables the set_skip_taskbar command without any pre-configured scope.","commands":{"allow":["set_skip_taskbar"],"deny":[]}},"allow-set-theme":{"identifier":"allow-set-theme","description":"Enables the set_theme command without any pre-configured scope.","commands":{"allow":["set_theme"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-title-bar-style":{"identifier":"allow-set-title-bar-style","description":"Enables the set_title_bar_style command without any pre-configured 
scope.","commands":{"allow":["set_title_bar_style"],"deny":[]}},"allow-set-visible-on-all-workspaces":{"identifier":"allow-set-visible-on-all-workspaces","description":"Enables the set_visible_on_all_workspaces command without any pre-configured scope.","commands":{"allow":["set_visible_on_all_workspaces"],"deny":[]}},"allow-show":{"identifier":"allow-show","description":"Enables the show command without any pre-configured scope.","commands":{"allow":["show"],"deny":[]}},"allow-start-dragging":{"identifier":"allow-start-dragging","description":"Enables the start_dragging command without any pre-configured scope.","commands":{"allow":["start_dragging"],"deny":[]}},"allow-start-resize-dragging":{"identifier":"allow-start-resize-dragging","description":"Enables the start_resize_dragging command without any pre-configured scope.","commands":{"allow":["start_resize_dragging"],"deny":[]}},"allow-theme":{"identifier":"allow-theme","description":"Enables the theme command without any pre-configured scope.","commands":{"allow":["theme"],"deny":[]}},"allow-title":{"identifier":"allow-title","description":"Enables the title command without any pre-configured scope.","commands":{"allow":["title"],"deny":[]}},"allow-toggle-maximize":{"identifier":"allow-toggle-maximize","description":"Enables the toggle_maximize command without any pre-configured scope.","commands":{"allow":["toggle_maximize"],"deny":[]}},"allow-unmaximize":{"identifier":"allow-unmaximize","description":"Enables the unmaximize command without any pre-configured scope.","commands":{"allow":["unmaximize"],"deny":[]}},"allow-unminimize":{"identifier":"allow-unminimize","description":"Enables the unminimize command without any pre-configured scope.","commands":{"allow":["unminimize"],"deny":[]}},"deny-available-monitors":{"identifier":"deny-available-monitors","description":"Denies the available_monitors command without any pre-configured 
scope.","commands":{"allow":[],"deny":["available_monitors"]}},"deny-center":{"identifier":"deny-center","description":"Denies the center command without any pre-configured scope.","commands":{"allow":[],"deny":["center"]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}},"deny-create":{"identifier":"deny-create","description":"Denies the create command without any pre-configured scope.","commands":{"allow":[],"deny":["create"]}},"deny-current-monitor":{"identifier":"deny-current-monitor","description":"Denies the current_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["current_monitor"]}},"deny-cursor-position":{"identifier":"deny-cursor-position","description":"Denies the cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["cursor_position"]}},"deny-destroy":{"identifier":"deny-destroy","description":"Denies the destroy command without any pre-configured scope.","commands":{"allow":[],"deny":["destroy"]}},"deny-get-all-windows":{"identifier":"deny-get-all-windows","description":"Denies the get_all_windows command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_windows"]}},"deny-hide":{"identifier":"deny-hide","description":"Denies the hide command without any pre-configured scope.","commands":{"allow":[],"deny":["hide"]}},"deny-inner-position":{"identifier":"deny-inner-position","description":"Denies the inner_position command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_position"]}},"deny-inner-size":{"identifier":"deny-inner-size","description":"Denies the inner_size command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_size"]}},"deny-internal-toggle-maximize":{"identifier":"deny-internal-toggle-maximize","description":"Denies the internal_toggle_maximize command without any pre-configured 
scope.","commands":{"allow":[],"deny":["internal_toggle_maximize"]}},"deny-is-always-on-top":{"identifier":"deny-is-always-on-top","description":"Denies the is_always_on_top command without any pre-configured scope.","commands":{"allow":[],"deny":["is_always_on_top"]}},"deny-is-closable":{"identifier":"deny-is-closable","description":"Denies the is_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_closable"]}},"deny-is-decorated":{"identifier":"deny-is-decorated","description":"Denies the is_decorated command without any pre-configured scope.","commands":{"allow":[],"deny":["is_decorated"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-is-focused":{"identifier":"deny-is-focused","description":"Denies the is_focused command without any pre-configured scope.","commands":{"allow":[],"deny":["is_focused"]}},"deny-is-fullscreen":{"identifier":"deny-is-fullscreen","description":"Denies the is_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["is_fullscreen"]}},"deny-is-maximizable":{"identifier":"deny-is-maximizable","description":"Denies the is_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximizable"]}},"deny-is-maximized":{"identifier":"deny-is-maximized","description":"Denies the is_maximized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximized"]}},"deny-is-minimizable":{"identifier":"deny-is-minimizable","description":"Denies the is_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimizable"]}},"deny-is-minimized":{"identifier":"deny-is-minimized","description":"Denies the is_minimized command without any pre-configured 
scope.","commands":{"allow":[],"deny":["is_minimized"]}},"deny-is-resizable":{"identifier":"deny-is-resizable","description":"Denies the is_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_resizable"]}},"deny-is-visible":{"identifier":"deny-is-visible","description":"Denies the is_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["is_visible"]}},"deny-maximize":{"identifier":"deny-maximize","description":"Denies the maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["maximize"]}},"deny-minimize":{"identifier":"deny-minimize","description":"Denies the minimize command without any pre-configured scope.","commands":{"allow":[],"deny":["minimize"]}},"deny-monitor-from-point":{"identifier":"deny-monitor-from-point","description":"Denies the monitor_from_point command without any pre-configured scope.","commands":{"allow":[],"deny":["monitor_from_point"]}},"deny-outer-position":{"identifier":"deny-outer-position","description":"Denies the outer_position command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_position"]}},"deny-outer-size":{"identifier":"deny-outer-size","description":"Denies the outer_size command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_size"]}},"deny-primary-monitor":{"identifier":"deny-primary-monitor","description":"Denies the primary_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["primary_monitor"]}},"deny-request-user-attention":{"identifier":"deny-request-user-attention","description":"Denies the request_user_attention command without any pre-configured scope.","commands":{"allow":[],"deny":["request_user_attention"]}},"deny-scale-factor":{"identifier":"deny-scale-factor","description":"Denies the scale_factor command without any pre-configured 
scope.","commands":{"allow":[],"deny":["scale_factor"]}},"deny-set-always-on-bottom":{"identifier":"deny-set-always-on-bottom","description":"Denies the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_always_on_bottom"]}},"deny-set-always-on-top":{"identifier":"deny-set-always-on-top","description":"Denies the set_always_on_top command without any pre-configured scope.","commands":{"allow":[],"deny":["set_always_on_top"]}},"deny-set-background-color":{"identifier":"deny-set-background-color","description":"Denies the set_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_background_color"]}},"deny-set-badge-count":{"identifier":"deny-set-badge-count","description":"Denies the set_badge_count command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_count"]}},"deny-set-badge-label":{"identifier":"deny-set-badge-label","description":"Denies the set_badge_label command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_label"]}},"deny-set-closable":{"identifier":"deny-set-closable","description":"Denies the set_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_closable"]}},"deny-set-content-protected":{"identifier":"deny-set-content-protected","description":"Denies the set_content_protected command without any pre-configured scope.","commands":{"allow":[],"deny":["set_content_protected"]}},"deny-set-cursor-grab":{"identifier":"deny-set-cursor-grab","description":"Denies the set_cursor_grab command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_grab"]}},"deny-set-cursor-icon":{"identifier":"deny-set-cursor-icon","description":"Denies the set_cursor_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_icon"]}},"deny-set-cursor-position":{"identifier":"deny-set-cursor-position","description":"Denies the 
set_cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_position"]}},"deny-set-cursor-visible":{"identifier":"deny-set-cursor-visible","description":"Denies the set_cursor_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_visible"]}},"deny-set-decorations":{"identifier":"deny-set-decorations","description":"Denies the set_decorations command without any pre-configured scope.","commands":{"allow":[],"deny":["set_decorations"]}},"deny-set-effects":{"identifier":"deny-set-effects","description":"Denies the set_effects command without any pre-configured scope.","commands":{"allow":[],"deny":["set_effects"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-focus":{"identifier":"deny-set-focus","description":"Denies the set_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focus"]}},"deny-set-fullscreen":{"identifier":"deny-set-fullscreen","description":"Denies the set_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_fullscreen"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-ignore-cursor-events":{"identifier":"deny-set-ignore-cursor-events","description":"Denies the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":[],"deny":["set_ignore_cursor_events"]}},"deny-set-max-size":{"identifier":"deny-set-max-size","description":"Denies the set_max_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_max_size"]}},"deny-set-maximizable":{"identifier":"deny-set-maximizable","description":"Denies the set_maximizable command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_maximizable"]}},"deny-set-min-size":{"identifier":"deny-set-min-size","description":"Denies the set_min_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_min_size"]}},"deny-set-minimizable":{"identifier":"deny-set-minimizable","description":"Denies the set_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_minimizable"]}},"deny-set-overlay-icon":{"identifier":"deny-set-overlay-icon","description":"Denies the set_overlay_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_overlay_icon"]}},"deny-set-position":{"identifier":"deny-set-position","description":"Denies the set_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_position"]}},"deny-set-progress-bar":{"identifier":"deny-set-progress-bar","description":"Denies the set_progress_bar command without any pre-configured scope.","commands":{"allow":[],"deny":["set_progress_bar"]}},"deny-set-resizable":{"identifier":"deny-set-resizable","description":"Denies the set_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_resizable"]}},"deny-set-shadow":{"identifier":"deny-set-shadow","description":"Denies the set_shadow command without any pre-configured scope.","commands":{"allow":[],"deny":["set_shadow"]}},"deny-set-size":{"identifier":"deny-set-size","description":"Denies the set_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size"]}},"deny-set-size-constraints":{"identifier":"deny-set-size-constraints","description":"Denies the set_size_constraints command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size_constraints"]}},"deny-set-skip-taskbar":{"identifier":"deny-set-skip-taskbar","description":"Denies the set_skip_taskbar command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_skip_taskbar"]}},"deny-set-theme":{"identifier":"deny-set-theme","description":"Denies the set_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_theme"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-title-bar-style":{"identifier":"deny-set-title-bar-style","description":"Denies the set_title_bar_style command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title_bar_style"]}},"deny-set-visible-on-all-workspaces":{"identifier":"deny-set-visible-on-all-workspaces","description":"Denies the set_visible_on_all_workspaces command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible_on_all_workspaces"]}},"deny-show":{"identifier":"deny-show","description":"Denies the show command without any pre-configured scope.","commands":{"allow":[],"deny":["show"]}},"deny-start-dragging":{"identifier":"deny-start-dragging","description":"Denies the start_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_dragging"]}},"deny-start-resize-dragging":{"identifier":"deny-start-resize-dragging","description":"Denies the start_resize_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_resize_dragging"]}},"deny-theme":{"identifier":"deny-theme","description":"Denies the theme command without any pre-configured scope.","commands":{"allow":[],"deny":["theme"]}},"deny-title":{"identifier":"deny-title","description":"Denies the title command without any pre-configured scope.","commands":{"allow":[],"deny":["title"]}},"deny-toggle-maximize":{"identifier":"deny-toggle-maximize","description":"Denies the toggle_maximize command without any pre-configured 
scope.","commands":{"allow":[],"deny":["toggle_maximize"]}},"deny-unmaximize":{"identifier":"deny-unmaximize","description":"Denies the unmaximize command without any pre-configured scope.","commands":{"allow":[],"deny":["unmaximize"]}},"deny-unminimize":{"identifier":"deny-unminimize","description":"Denies the unminimize command without any pre-configured scope.","commands":{"allow":[],"deny":["unminimize"]}}},"permission_sets":{},"global_scope_schema":null}} \ No newline at end of file +{"core":{"default_permission":{"identifier":"default","description":"Default core plugins set.","permissions":["core:path:default","core:event:default","core:window:default","core:webview:default","core:app:default","core:image:default","core:resources:default","core:menu:default","core:tray:default"]},"permissions":{},"permission_sets":{},"global_scope_schema":null},"core:app":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-version","allow-name","allow-tauri-version","allow-identifier","allow-bundle-type","allow-register-listener","allow-remove-listener"]},"permissions":{"allow-app-hide":{"identifier":"allow-app-hide","description":"Enables the app_hide command without any pre-configured scope.","commands":{"allow":["app_hide"],"deny":[]}},"allow-app-show":{"identifier":"allow-app-show","description":"Enables the app_show command without any pre-configured scope.","commands":{"allow":["app_show"],"deny":[]}},"allow-bundle-type":{"identifier":"allow-bundle-type","description":"Enables the bundle_type command without any pre-configured scope.","commands":{"allow":["bundle_type"],"deny":[]}},"allow-default-window-icon":{"identifier":"allow-default-window-icon","description":"Enables the default_window_icon command without any pre-configured scope.","commands":{"allow":["default_window_icon"],"deny":[]}},"allow-fetch-data-store-identifiers":{"identifier":"allow-fetch-data-store-identifiers","description":"Enables 
the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":["fetch_data_store_identifiers"],"deny":[]}},"allow-identifier":{"identifier":"allow-identifier","description":"Enables the identifier command without any pre-configured scope.","commands":{"allow":["identifier"],"deny":[]}},"allow-name":{"identifier":"allow-name","description":"Enables the name command without any pre-configured scope.","commands":{"allow":["name"],"deny":[]}},"allow-register-listener":{"identifier":"allow-register-listener","description":"Enables the register_listener command without any pre-configured scope.","commands":{"allow":["register_listener"],"deny":[]}},"allow-remove-data-store":{"identifier":"allow-remove-data-store","description":"Enables the remove_data_store command without any pre-configured scope.","commands":{"allow":["remove_data_store"],"deny":[]}},"allow-remove-listener":{"identifier":"allow-remove-listener","description":"Enables the remove_listener command without any pre-configured scope.","commands":{"allow":["remove_listener"],"deny":[]}},"allow-set-app-theme":{"identifier":"allow-set-app-theme","description":"Enables the set_app_theme command without any pre-configured scope.","commands":{"allow":["set_app_theme"],"deny":[]}},"allow-set-dock-visibility":{"identifier":"allow-set-dock-visibility","description":"Enables the set_dock_visibility command without any pre-configured scope.","commands":{"allow":["set_dock_visibility"],"deny":[]}},"allow-tauri-version":{"identifier":"allow-tauri-version","description":"Enables the tauri_version command without any pre-configured scope.","commands":{"allow":["tauri_version"],"deny":[]}},"allow-version":{"identifier":"allow-version","description":"Enables the version command without any pre-configured scope.","commands":{"allow":["version"],"deny":[]}},"deny-app-hide":{"identifier":"deny-app-hide","description":"Denies the app_hide command without any pre-configured 
scope.","commands":{"allow":[],"deny":["app_hide"]}},"deny-app-show":{"identifier":"deny-app-show","description":"Denies the app_show command without any pre-configured scope.","commands":{"allow":[],"deny":["app_show"]}},"deny-bundle-type":{"identifier":"deny-bundle-type","description":"Denies the bundle_type command without any pre-configured scope.","commands":{"allow":[],"deny":["bundle_type"]}},"deny-default-window-icon":{"identifier":"deny-default-window-icon","description":"Denies the default_window_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["default_window_icon"]}},"deny-fetch-data-store-identifiers":{"identifier":"deny-fetch-data-store-identifiers","description":"Denies the fetch_data_store_identifiers command without any pre-configured scope.","commands":{"allow":[],"deny":["fetch_data_store_identifiers"]}},"deny-identifier":{"identifier":"deny-identifier","description":"Denies the identifier command without any pre-configured scope.","commands":{"allow":[],"deny":["identifier"]}},"deny-name":{"identifier":"deny-name","description":"Denies the name command without any pre-configured scope.","commands":{"allow":[],"deny":["name"]}},"deny-register-listener":{"identifier":"deny-register-listener","description":"Denies the register_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["register_listener"]}},"deny-remove-data-store":{"identifier":"deny-remove-data-store","description":"Denies the remove_data_store command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_data_store"]}},"deny-remove-listener":{"identifier":"deny-remove-listener","description":"Denies the remove_listener command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_listener"]}},"deny-set-app-theme":{"identifier":"deny-set-app-theme","description":"Denies the set_app_theme command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_app_theme"]}},"deny-set-dock-visibility":{"identifier":"deny-set-dock-visibility","description":"Denies the set_dock_visibility command without any pre-configured scope.","commands":{"allow":[],"deny":["set_dock_visibility"]}},"deny-tauri-version":{"identifier":"deny-tauri-version","description":"Denies the tauri_version command without any pre-configured scope.","commands":{"allow":[],"deny":["tauri_version"]}},"deny-version":{"identifier":"deny-version","description":"Denies the version command without any pre-configured scope.","commands":{"allow":[],"deny":["version"]}}},"permission_sets":{},"global_scope_schema":null},"core:event":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-listen","allow-unlisten","allow-emit","allow-emit-to"]},"permissions":{"allow-emit":{"identifier":"allow-emit","description":"Enables the emit command without any pre-configured scope.","commands":{"allow":["emit"],"deny":[]}},"allow-emit-to":{"identifier":"allow-emit-to","description":"Enables the emit_to command without any pre-configured scope.","commands":{"allow":["emit_to"],"deny":[]}},"allow-listen":{"identifier":"allow-listen","description":"Enables the listen command without any pre-configured scope.","commands":{"allow":["listen"],"deny":[]}},"allow-unlisten":{"identifier":"allow-unlisten","description":"Enables the unlisten command without any pre-configured scope.","commands":{"allow":["unlisten"],"deny":[]}},"deny-emit":{"identifier":"deny-emit","description":"Denies the emit command without any pre-configured scope.","commands":{"allow":[],"deny":["emit"]}},"deny-emit-to":{"identifier":"deny-emit-to","description":"Denies the emit_to command without any pre-configured scope.","commands":{"allow":[],"deny":["emit_to"]}},"deny-listen":{"identifier":"deny-listen","description":"Denies the listen command without any pre-configured 
scope.","commands":{"allow":[],"deny":["listen"]}},"deny-unlisten":{"identifier":"deny-unlisten","description":"Denies the unlisten command without any pre-configured scope.","commands":{"allow":[],"deny":["unlisten"]}}},"permission_sets":{},"global_scope_schema":null},"core:image":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-from-bytes","allow-from-path","allow-rgba","allow-size"]},"permissions":{"allow-from-bytes":{"identifier":"allow-from-bytes","description":"Enables the from_bytes command without any pre-configured scope.","commands":{"allow":["from_bytes"],"deny":[]}},"allow-from-path":{"identifier":"allow-from-path","description":"Enables the from_path command without any pre-configured scope.","commands":{"allow":["from_path"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-rgba":{"identifier":"allow-rgba","description":"Enables the rgba command without any pre-configured scope.","commands":{"allow":["rgba"],"deny":[]}},"allow-size":{"identifier":"allow-size","description":"Enables the size command without any pre-configured scope.","commands":{"allow":["size"],"deny":[]}},"deny-from-bytes":{"identifier":"deny-from-bytes","description":"Denies the from_bytes command without any pre-configured scope.","commands":{"allow":[],"deny":["from_bytes"]}},"deny-from-path":{"identifier":"deny-from-path","description":"Denies the from_path command without any pre-configured scope.","commands":{"allow":[],"deny":["from_path"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-rgba":{"identifier":"deny-rgba","description":"Denies the rgba command without any pre-configured 
scope.","commands":{"allow":[],"deny":["rgba"]}},"deny-size":{"identifier":"deny-size","description":"Denies the size command without any pre-configured scope.","commands":{"allow":[],"deny":["size"]}}},"permission_sets":{},"global_scope_schema":null},"core:menu":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-append","allow-prepend","allow-insert","allow-remove","allow-remove-at","allow-items","allow-get","allow-popup","allow-create-default","allow-set-as-app-menu","allow-set-as-window-menu","allow-text","allow-set-text","allow-is-enabled","allow-set-enabled","allow-set-accelerator","allow-set-as-windows-menu-for-nsapp","allow-set-as-help-menu-for-nsapp","allow-is-checked","allow-set-checked","allow-set-icon"]},"permissions":{"allow-append":{"identifier":"allow-append","description":"Enables the append command without any pre-configured scope.","commands":{"allow":["append"],"deny":[]}},"allow-create-default":{"identifier":"allow-create-default","description":"Enables the create_default command without any pre-configured scope.","commands":{"allow":["create_default"],"deny":[]}},"allow-get":{"identifier":"allow-get","description":"Enables the get command without any pre-configured scope.","commands":{"allow":["get"],"deny":[]}},"allow-insert":{"identifier":"allow-insert","description":"Enables the insert command without any pre-configured scope.","commands":{"allow":["insert"],"deny":[]}},"allow-is-checked":{"identifier":"allow-is-checked","description":"Enables the is_checked command without any pre-configured scope.","commands":{"allow":["is_checked"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-items":{"identifier":"allow-items","description":"Enables the items command without any pre-configured 
scope.","commands":{"allow":["items"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-popup":{"identifier":"allow-popup","description":"Enables the popup command without any pre-configured scope.","commands":{"allow":["popup"],"deny":[]}},"allow-prepend":{"identifier":"allow-prepend","description":"Enables the prepend command without any pre-configured scope.","commands":{"allow":["prepend"],"deny":[]}},"allow-remove":{"identifier":"allow-remove","description":"Enables the remove command without any pre-configured scope.","commands":{"allow":["remove"],"deny":[]}},"allow-remove-at":{"identifier":"allow-remove-at","description":"Enables the remove_at command without any pre-configured scope.","commands":{"allow":["remove_at"],"deny":[]}},"allow-set-accelerator":{"identifier":"allow-set-accelerator","description":"Enables the set_accelerator command without any pre-configured scope.","commands":{"allow":["set_accelerator"],"deny":[]}},"allow-set-as-app-menu":{"identifier":"allow-set-as-app-menu","description":"Enables the set_as_app_menu command without any pre-configured scope.","commands":{"allow":["set_as_app_menu"],"deny":[]}},"allow-set-as-help-menu-for-nsapp":{"identifier":"allow-set-as-help-menu-for-nsapp","description":"Enables the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":["set_as_help_menu_for_nsapp"],"deny":[]}},"allow-set-as-window-menu":{"identifier":"allow-set-as-window-menu","description":"Enables the set_as_window_menu command without any pre-configured scope.","commands":{"allow":["set_as_window_menu"],"deny":[]}},"allow-set-as-windows-menu-for-nsapp":{"identifier":"allow-set-as-windows-menu-for-nsapp","description":"Enables the set_as_windows_menu_for_nsapp command without any pre-configured 
scope.","commands":{"allow":["set_as_windows_menu_for_nsapp"],"deny":[]}},"allow-set-checked":{"identifier":"allow-set-checked","description":"Enables the set_checked command without any pre-configured scope.","commands":{"allow":["set_checked"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-text":{"identifier":"allow-set-text","description":"Enables the set_text command without any pre-configured scope.","commands":{"allow":["set_text"],"deny":[]}},"allow-text":{"identifier":"allow-text","description":"Enables the text command without any pre-configured scope.","commands":{"allow":["text"],"deny":[]}},"deny-append":{"identifier":"deny-append","description":"Denies the append command without any pre-configured scope.","commands":{"allow":[],"deny":["append"]}},"deny-create-default":{"identifier":"deny-create-default","description":"Denies the create_default command without any pre-configured scope.","commands":{"allow":[],"deny":["create_default"]}},"deny-get":{"identifier":"deny-get","description":"Denies the get command without any pre-configured scope.","commands":{"allow":[],"deny":["get"]}},"deny-insert":{"identifier":"deny-insert","description":"Denies the insert command without any pre-configured scope.","commands":{"allow":[],"deny":["insert"]}},"deny-is-checked":{"identifier":"deny-is-checked","description":"Denies the is_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["is_checked"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured 
scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-items":{"identifier":"deny-items","description":"Denies the items command without any pre-configured scope.","commands":{"allow":[],"deny":["items"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-popup":{"identifier":"deny-popup","description":"Denies the popup command without any pre-configured scope.","commands":{"allow":[],"deny":["popup"]}},"deny-prepend":{"identifier":"deny-prepend","description":"Denies the prepend command without any pre-configured scope.","commands":{"allow":[],"deny":["prepend"]}},"deny-remove":{"identifier":"deny-remove","description":"Denies the remove command without any pre-configured scope.","commands":{"allow":[],"deny":["remove"]}},"deny-remove-at":{"identifier":"deny-remove-at","description":"Denies the remove_at command without any pre-configured scope.","commands":{"allow":[],"deny":["remove_at"]}},"deny-set-accelerator":{"identifier":"deny-set-accelerator","description":"Denies the set_accelerator command without any pre-configured scope.","commands":{"allow":[],"deny":["set_accelerator"]}},"deny-set-as-app-menu":{"identifier":"deny-set-as-app-menu","description":"Denies the set_as_app_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_app_menu"]}},"deny-set-as-help-menu-for-nsapp":{"identifier":"deny-set-as-help-menu-for-nsapp","description":"Denies the set_as_help_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_help_menu_for_nsapp"]}},"deny-set-as-window-menu":{"identifier":"deny-set-as-window-menu","description":"Denies the set_as_window_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_window_menu"]}},"deny-set-as-windows-menu-for-nsapp":{"identifier":"deny-set-as-windows-menu-for-nsapp","description":"Denies the 
set_as_windows_menu_for_nsapp command without any pre-configured scope.","commands":{"allow":[],"deny":["set_as_windows_menu_for_nsapp"]}},"deny-set-checked":{"identifier":"deny-set-checked","description":"Denies the set_checked command without any pre-configured scope.","commands":{"allow":[],"deny":["set_checked"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-text":{"identifier":"deny-set-text","description":"Denies the set_text command without any pre-configured scope.","commands":{"allow":[],"deny":["set_text"]}},"deny-text":{"identifier":"deny-text","description":"Denies the text command without any pre-configured scope.","commands":{"allow":[],"deny":["text"]}}},"permission_sets":{},"global_scope_schema":null},"core:path":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-resolve-directory","allow-resolve","allow-normalize","allow-join","allow-dirname","allow-extname","allow-basename","allow-is-absolute"]},"permissions":{"allow-basename":{"identifier":"allow-basename","description":"Enables the basename command without any pre-configured scope.","commands":{"allow":["basename"],"deny":[]}},"allow-dirname":{"identifier":"allow-dirname","description":"Enables the dirname command without any pre-configured scope.","commands":{"allow":["dirname"],"deny":[]}},"allow-extname":{"identifier":"allow-extname","description":"Enables the extname command without any pre-configured scope.","commands":{"allow":["extname"],"deny":[]}},"allow-is-absolute":{"identifier":"allow-is-absolute","description":"Enables the is_absolute command without any pre-configured 
scope.","commands":{"allow":["is_absolute"],"deny":[]}},"allow-join":{"identifier":"allow-join","description":"Enables the join command without any pre-configured scope.","commands":{"allow":["join"],"deny":[]}},"allow-normalize":{"identifier":"allow-normalize","description":"Enables the normalize command without any pre-configured scope.","commands":{"allow":["normalize"],"deny":[]}},"allow-resolve":{"identifier":"allow-resolve","description":"Enables the resolve command without any pre-configured scope.","commands":{"allow":["resolve"],"deny":[]}},"allow-resolve-directory":{"identifier":"allow-resolve-directory","description":"Enables the resolve_directory command without any pre-configured scope.","commands":{"allow":["resolve_directory"],"deny":[]}},"deny-basename":{"identifier":"deny-basename","description":"Denies the basename command without any pre-configured scope.","commands":{"allow":[],"deny":["basename"]}},"deny-dirname":{"identifier":"deny-dirname","description":"Denies the dirname command without any pre-configured scope.","commands":{"allow":[],"deny":["dirname"]}},"deny-extname":{"identifier":"deny-extname","description":"Denies the extname command without any pre-configured scope.","commands":{"allow":[],"deny":["extname"]}},"deny-is-absolute":{"identifier":"deny-is-absolute","description":"Denies the is_absolute command without any pre-configured scope.","commands":{"allow":[],"deny":["is_absolute"]}},"deny-join":{"identifier":"deny-join","description":"Denies the join command without any pre-configured scope.","commands":{"allow":[],"deny":["join"]}},"deny-normalize":{"identifier":"deny-normalize","description":"Denies the normalize command without any pre-configured scope.","commands":{"allow":[],"deny":["normalize"]}},"deny-resolve":{"identifier":"deny-resolve","description":"Denies the resolve command without any pre-configured 
scope.","commands":{"allow":[],"deny":["resolve"]}},"deny-resolve-directory":{"identifier":"deny-resolve-directory","description":"Denies the resolve_directory command without any pre-configured scope.","commands":{"allow":[],"deny":["resolve_directory"]}}},"permission_sets":{},"global_scope_schema":null},"core:resources":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-close"]},"permissions":{"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}}},"permission_sets":{},"global_scope_schema":null},"core:tray":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin, which enables all commands.","permissions":["allow-new","allow-get-by-id","allow-remove-by-id","allow-set-icon","allow-set-menu","allow-set-tooltip","allow-set-title","allow-set-visible","allow-set-temp-dir-path","allow-set-icon-as-template","allow-set-show-menu-on-left-click"]},"permissions":{"allow-get-by-id":{"identifier":"allow-get-by-id","description":"Enables the get_by_id command without any pre-configured scope.","commands":{"allow":["get_by_id"],"deny":[]}},"allow-new":{"identifier":"allow-new","description":"Enables the new command without any pre-configured scope.","commands":{"allow":["new"],"deny":[]}},"allow-remove-by-id":{"identifier":"allow-remove-by-id","description":"Enables the remove_by_id command without any pre-configured scope.","commands":{"allow":["remove_by_id"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured 
scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-icon-as-template":{"identifier":"allow-set-icon-as-template","description":"Enables the set_icon_as_template command without any pre-configured scope.","commands":{"allow":["set_icon_as_template"],"deny":[]}},"allow-set-menu":{"identifier":"allow-set-menu","description":"Enables the set_menu command without any pre-configured scope.","commands":{"allow":["set_menu"],"deny":[]}},"allow-set-show-menu-on-left-click":{"identifier":"allow-set-show-menu-on-left-click","description":"Enables the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":["set_show_menu_on_left_click"],"deny":[]}},"allow-set-temp-dir-path":{"identifier":"allow-set-temp-dir-path","description":"Enables the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":["set_temp_dir_path"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-tooltip":{"identifier":"allow-set-tooltip","description":"Enables the set_tooltip command without any pre-configured scope.","commands":{"allow":["set_tooltip"],"deny":[]}},"allow-set-visible":{"identifier":"allow-set-visible","description":"Enables the set_visible command without any pre-configured scope.","commands":{"allow":["set_visible"],"deny":[]}},"deny-get-by-id":{"identifier":"deny-get-by-id","description":"Denies the get_by_id command without any pre-configured scope.","commands":{"allow":[],"deny":["get_by_id"]}},"deny-new":{"identifier":"deny-new","description":"Denies the new command without any pre-configured scope.","commands":{"allow":[],"deny":["new"]}},"deny-remove-by-id":{"identifier":"deny-remove-by-id","description":"Denies the remove_by_id command without any pre-configured 
scope.","commands":{"allow":[],"deny":["remove_by_id"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-icon-as-template":{"identifier":"deny-set-icon-as-template","description":"Denies the set_icon_as_template command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon_as_template"]}},"deny-set-menu":{"identifier":"deny-set-menu","description":"Denies the set_menu command without any pre-configured scope.","commands":{"allow":[],"deny":["set_menu"]}},"deny-set-show-menu-on-left-click":{"identifier":"deny-set-show-menu-on-left-click","description":"Denies the set_show_menu_on_left_click command without any pre-configured scope.","commands":{"allow":[],"deny":["set_show_menu_on_left_click"]}},"deny-set-temp-dir-path":{"identifier":"deny-set-temp-dir-path","description":"Denies the set_temp_dir_path command without any pre-configured scope.","commands":{"allow":[],"deny":["set_temp_dir_path"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-tooltip":{"identifier":"deny-set-tooltip","description":"Denies the set_tooltip command without any pre-configured scope.","commands":{"allow":[],"deny":["set_tooltip"]}},"deny-set-visible":{"identifier":"deny-set-visible","description":"Denies the set_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible"]}}},"permission_sets":{},"global_scope_schema":null},"core:webview":{"default_permission":{"identifier":"default","description":"Default permissions for the plugin.","permissions":["allow-get-all-webviews","allow-webview-position","allow-webview-size","allow-internal-toggle-devtools"]},"permissions":{"allow-clear-all-browsing-data":{"identifier":"allow-clear-all-browsing-data","description":"Enables 
the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":["clear_all_browsing_data"],"deny":[]}},"allow-create-webview":{"identifier":"allow-create-webview","description":"Enables the create_webview command without any pre-configured scope.","commands":{"allow":["create_webview"],"deny":[]}},"allow-create-webview-window":{"identifier":"allow-create-webview-window","description":"Enables the create_webview_window command without any pre-configured scope.","commands":{"allow":["create_webview_window"],"deny":[]}},"allow-get-all-webviews":{"identifier":"allow-get-all-webviews","description":"Enables the get_all_webviews command without any pre-configured scope.","commands":{"allow":["get_all_webviews"],"deny":[]}},"allow-internal-toggle-devtools":{"identifier":"allow-internal-toggle-devtools","description":"Enables the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":["internal_toggle_devtools"],"deny":[]}},"allow-print":{"identifier":"allow-print","description":"Enables the print command without any pre-configured scope.","commands":{"allow":["print"],"deny":[]}},"allow-reparent":{"identifier":"allow-reparent","description":"Enables the reparent command without any pre-configured scope.","commands":{"allow":["reparent"],"deny":[]}},"allow-set-webview-auto-resize":{"identifier":"allow-set-webview-auto-resize","description":"Enables the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":["set_webview_auto_resize"],"deny":[]}},"allow-set-webview-background-color":{"identifier":"allow-set-webview-background-color","description":"Enables the set_webview_background_color command without any pre-configured scope.","commands":{"allow":["set_webview_background_color"],"deny":[]}},"allow-set-webview-focus":{"identifier":"allow-set-webview-focus","description":"Enables the set_webview_focus command without any pre-configured 
scope.","commands":{"allow":["set_webview_focus"],"deny":[]}},"allow-set-webview-position":{"identifier":"allow-set-webview-position","description":"Enables the set_webview_position command without any pre-configured scope.","commands":{"allow":["set_webview_position"],"deny":[]}},"allow-set-webview-size":{"identifier":"allow-set-webview-size","description":"Enables the set_webview_size command without any pre-configured scope.","commands":{"allow":["set_webview_size"],"deny":[]}},"allow-set-webview-zoom":{"identifier":"allow-set-webview-zoom","description":"Enables the set_webview_zoom command without any pre-configured scope.","commands":{"allow":["set_webview_zoom"],"deny":[]}},"allow-webview-close":{"identifier":"allow-webview-close","description":"Enables the webview_close command without any pre-configured scope.","commands":{"allow":["webview_close"],"deny":[]}},"allow-webview-hide":{"identifier":"allow-webview-hide","description":"Enables the webview_hide command without any pre-configured scope.","commands":{"allow":["webview_hide"],"deny":[]}},"allow-webview-position":{"identifier":"allow-webview-position","description":"Enables the webview_position command without any pre-configured scope.","commands":{"allow":["webview_position"],"deny":[]}},"allow-webview-show":{"identifier":"allow-webview-show","description":"Enables the webview_show command without any pre-configured scope.","commands":{"allow":["webview_show"],"deny":[]}},"allow-webview-size":{"identifier":"allow-webview-size","description":"Enables the webview_size command without any pre-configured scope.","commands":{"allow":["webview_size"],"deny":[]}},"deny-clear-all-browsing-data":{"identifier":"deny-clear-all-browsing-data","description":"Denies the clear_all_browsing_data command without any pre-configured scope.","commands":{"allow":[],"deny":["clear_all_browsing_data"]}},"deny-create-webview":{"identifier":"deny-create-webview","description":"Denies the create_webview command without any 
pre-configured scope.","commands":{"allow":[],"deny":["create_webview"]}},"deny-create-webview-window":{"identifier":"deny-create-webview-window","description":"Denies the create_webview_window command without any pre-configured scope.","commands":{"allow":[],"deny":["create_webview_window"]}},"deny-get-all-webviews":{"identifier":"deny-get-all-webviews","description":"Denies the get_all_webviews command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_webviews"]}},"deny-internal-toggle-devtools":{"identifier":"deny-internal-toggle-devtools","description":"Denies the internal_toggle_devtools command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_devtools"]}},"deny-print":{"identifier":"deny-print","description":"Denies the print command without any pre-configured scope.","commands":{"allow":[],"deny":["print"]}},"deny-reparent":{"identifier":"deny-reparent","description":"Denies the reparent command without any pre-configured scope.","commands":{"allow":[],"deny":["reparent"]}},"deny-set-webview-auto-resize":{"identifier":"deny-set-webview-auto-resize","description":"Denies the set_webview_auto_resize command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_auto_resize"]}},"deny-set-webview-background-color":{"identifier":"deny-set-webview-background-color","description":"Denies the set_webview_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_background_color"]}},"deny-set-webview-focus":{"identifier":"deny-set-webview-focus","description":"Denies the set_webview_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_focus"]}},"deny-set-webview-position":{"identifier":"deny-set-webview-position","description":"Denies the set_webview_position command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_webview_position"]}},"deny-set-webview-size":{"identifier":"deny-set-webview-size","description":"Denies the set_webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_size"]}},"deny-set-webview-zoom":{"identifier":"deny-set-webview-zoom","description":"Denies the set_webview_zoom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_webview_zoom"]}},"deny-webview-close":{"identifier":"deny-webview-close","description":"Denies the webview_close command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_close"]}},"deny-webview-hide":{"identifier":"deny-webview-hide","description":"Denies the webview_hide command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_hide"]}},"deny-webview-position":{"identifier":"deny-webview-position","description":"Denies the webview_position command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_position"]}},"deny-webview-show":{"identifier":"deny-webview-show","description":"Denies the webview_show command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_show"]}},"deny-webview-size":{"identifier":"deny-webview-size","description":"Denies the webview_size command without any pre-configured scope.","commands":{"allow":[],"deny":["webview_size"]}}},"permission_sets":{},"global_scope_schema":null},"core:window":{"default_permission":{"identifier":"default","description":"Default permissions for the 
plugin.","permissions":["allow-get-all-windows","allow-scale-factor","allow-inner-position","allow-outer-position","allow-inner-size","allow-outer-size","allow-is-fullscreen","allow-is-minimized","allow-is-maximized","allow-is-focused","allow-is-decorated","allow-is-resizable","allow-is-maximizable","allow-is-minimizable","allow-is-closable","allow-is-visible","allow-is-enabled","allow-title","allow-current-monitor","allow-primary-monitor","allow-monitor-from-point","allow-available-monitors","allow-cursor-position","allow-theme","allow-is-always-on-top","allow-internal-toggle-maximize"]},"permissions":{"allow-available-monitors":{"identifier":"allow-available-monitors","description":"Enables the available_monitors command without any pre-configured scope.","commands":{"allow":["available_monitors"],"deny":[]}},"allow-center":{"identifier":"allow-center","description":"Enables the center command without any pre-configured scope.","commands":{"allow":["center"],"deny":[]}},"allow-close":{"identifier":"allow-close","description":"Enables the close command without any pre-configured scope.","commands":{"allow":["close"],"deny":[]}},"allow-create":{"identifier":"allow-create","description":"Enables the create command without any pre-configured scope.","commands":{"allow":["create"],"deny":[]}},"allow-current-monitor":{"identifier":"allow-current-monitor","description":"Enables the current_monitor command without any pre-configured scope.","commands":{"allow":["current_monitor"],"deny":[]}},"allow-cursor-position":{"identifier":"allow-cursor-position","description":"Enables the cursor_position command without any pre-configured scope.","commands":{"allow":["cursor_position"],"deny":[]}},"allow-destroy":{"identifier":"allow-destroy","description":"Enables the destroy command without any pre-configured scope.","commands":{"allow":["destroy"],"deny":[]}},"allow-get-all-windows":{"identifier":"allow-get-all-windows","description":"Enables the get_all_windows command without 
any pre-configured scope.","commands":{"allow":["get_all_windows"],"deny":[]}},"allow-hide":{"identifier":"allow-hide","description":"Enables the hide command without any pre-configured scope.","commands":{"allow":["hide"],"deny":[]}},"allow-inner-position":{"identifier":"allow-inner-position","description":"Enables the inner_position command without any pre-configured scope.","commands":{"allow":["inner_position"],"deny":[]}},"allow-inner-size":{"identifier":"allow-inner-size","description":"Enables the inner_size command without any pre-configured scope.","commands":{"allow":["inner_size"],"deny":[]}},"allow-internal-toggle-maximize":{"identifier":"allow-internal-toggle-maximize","description":"Enables the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":["internal_toggle_maximize"],"deny":[]}},"allow-is-always-on-top":{"identifier":"allow-is-always-on-top","description":"Enables the is_always_on_top command without any pre-configured scope.","commands":{"allow":["is_always_on_top"],"deny":[]}},"allow-is-closable":{"identifier":"allow-is-closable","description":"Enables the is_closable command without any pre-configured scope.","commands":{"allow":["is_closable"],"deny":[]}},"allow-is-decorated":{"identifier":"allow-is-decorated","description":"Enables the is_decorated command without any pre-configured scope.","commands":{"allow":["is_decorated"],"deny":[]}},"allow-is-enabled":{"identifier":"allow-is-enabled","description":"Enables the is_enabled command without any pre-configured scope.","commands":{"allow":["is_enabled"],"deny":[]}},"allow-is-focused":{"identifier":"allow-is-focused","description":"Enables the is_focused command without any pre-configured scope.","commands":{"allow":["is_focused"],"deny":[]}},"allow-is-fullscreen":{"identifier":"allow-is-fullscreen","description":"Enables the is_fullscreen command without any pre-configured 
scope.","commands":{"allow":["is_fullscreen"],"deny":[]}},"allow-is-maximizable":{"identifier":"allow-is-maximizable","description":"Enables the is_maximizable command without any pre-configured scope.","commands":{"allow":["is_maximizable"],"deny":[]}},"allow-is-maximized":{"identifier":"allow-is-maximized","description":"Enables the is_maximized command without any pre-configured scope.","commands":{"allow":["is_maximized"],"deny":[]}},"allow-is-minimizable":{"identifier":"allow-is-minimizable","description":"Enables the is_minimizable command without any pre-configured scope.","commands":{"allow":["is_minimizable"],"deny":[]}},"allow-is-minimized":{"identifier":"allow-is-minimized","description":"Enables the is_minimized command without any pre-configured scope.","commands":{"allow":["is_minimized"],"deny":[]}},"allow-is-resizable":{"identifier":"allow-is-resizable","description":"Enables the is_resizable command without any pre-configured scope.","commands":{"allow":["is_resizable"],"deny":[]}},"allow-is-visible":{"identifier":"allow-is-visible","description":"Enables the is_visible command without any pre-configured scope.","commands":{"allow":["is_visible"],"deny":[]}},"allow-maximize":{"identifier":"allow-maximize","description":"Enables the maximize command without any pre-configured scope.","commands":{"allow":["maximize"],"deny":[]}},"allow-minimize":{"identifier":"allow-minimize","description":"Enables the minimize command without any pre-configured scope.","commands":{"allow":["minimize"],"deny":[]}},"allow-monitor-from-point":{"identifier":"allow-monitor-from-point","description":"Enables the monitor_from_point command without any pre-configured scope.","commands":{"allow":["monitor_from_point"],"deny":[]}},"allow-outer-position":{"identifier":"allow-outer-position","description":"Enables the outer_position command without any pre-configured 
scope.","commands":{"allow":["outer_position"],"deny":[]}},"allow-outer-size":{"identifier":"allow-outer-size","description":"Enables the outer_size command without any pre-configured scope.","commands":{"allow":["outer_size"],"deny":[]}},"allow-primary-monitor":{"identifier":"allow-primary-monitor","description":"Enables the primary_monitor command without any pre-configured scope.","commands":{"allow":["primary_monitor"],"deny":[]}},"allow-request-user-attention":{"identifier":"allow-request-user-attention","description":"Enables the request_user_attention command without any pre-configured scope.","commands":{"allow":["request_user_attention"],"deny":[]}},"allow-scale-factor":{"identifier":"allow-scale-factor","description":"Enables the scale_factor command without any pre-configured scope.","commands":{"allow":["scale_factor"],"deny":[]}},"allow-set-always-on-bottom":{"identifier":"allow-set-always-on-bottom","description":"Enables the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":["set_always_on_bottom"],"deny":[]}},"allow-set-always-on-top":{"identifier":"allow-set-always-on-top","description":"Enables the set_always_on_top command without any pre-configured scope.","commands":{"allow":["set_always_on_top"],"deny":[]}},"allow-set-background-color":{"identifier":"allow-set-background-color","description":"Enables the set_background_color command without any pre-configured scope.","commands":{"allow":["set_background_color"],"deny":[]}},"allow-set-badge-count":{"identifier":"allow-set-badge-count","description":"Enables the set_badge_count command without any pre-configured scope.","commands":{"allow":["set_badge_count"],"deny":[]}},"allow-set-badge-label":{"identifier":"allow-set-badge-label","description":"Enables the set_badge_label command without any pre-configured scope.","commands":{"allow":["set_badge_label"],"deny":[]}},"allow-set-closable":{"identifier":"allow-set-closable","description":"Enables the set_closable 
command without any pre-configured scope.","commands":{"allow":["set_closable"],"deny":[]}},"allow-set-content-protected":{"identifier":"allow-set-content-protected","description":"Enables the set_content_protected command without any pre-configured scope.","commands":{"allow":["set_content_protected"],"deny":[]}},"allow-set-cursor-grab":{"identifier":"allow-set-cursor-grab","description":"Enables the set_cursor_grab command without any pre-configured scope.","commands":{"allow":["set_cursor_grab"],"deny":[]}},"allow-set-cursor-icon":{"identifier":"allow-set-cursor-icon","description":"Enables the set_cursor_icon command without any pre-configured scope.","commands":{"allow":["set_cursor_icon"],"deny":[]}},"allow-set-cursor-position":{"identifier":"allow-set-cursor-position","description":"Enables the set_cursor_position command without any pre-configured scope.","commands":{"allow":["set_cursor_position"],"deny":[]}},"allow-set-cursor-visible":{"identifier":"allow-set-cursor-visible","description":"Enables the set_cursor_visible command without any pre-configured scope.","commands":{"allow":["set_cursor_visible"],"deny":[]}},"allow-set-decorations":{"identifier":"allow-set-decorations","description":"Enables the set_decorations command without any pre-configured scope.","commands":{"allow":["set_decorations"],"deny":[]}},"allow-set-effects":{"identifier":"allow-set-effects","description":"Enables the set_effects command without any pre-configured scope.","commands":{"allow":["set_effects"],"deny":[]}},"allow-set-enabled":{"identifier":"allow-set-enabled","description":"Enables the set_enabled command without any pre-configured scope.","commands":{"allow":["set_enabled"],"deny":[]}},"allow-set-focus":{"identifier":"allow-set-focus","description":"Enables the set_focus command without any pre-configured scope.","commands":{"allow":["set_focus"],"deny":[]}},"allow-set-focusable":{"identifier":"allow-set-focusable","description":"Enables the set_focusable command 
without any pre-configured scope.","commands":{"allow":["set_focusable"],"deny":[]}},"allow-set-fullscreen":{"identifier":"allow-set-fullscreen","description":"Enables the set_fullscreen command without any pre-configured scope.","commands":{"allow":["set_fullscreen"],"deny":[]}},"allow-set-icon":{"identifier":"allow-set-icon","description":"Enables the set_icon command without any pre-configured scope.","commands":{"allow":["set_icon"],"deny":[]}},"allow-set-ignore-cursor-events":{"identifier":"allow-set-ignore-cursor-events","description":"Enables the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":["set_ignore_cursor_events"],"deny":[]}},"allow-set-max-size":{"identifier":"allow-set-max-size","description":"Enables the set_max_size command without any pre-configured scope.","commands":{"allow":["set_max_size"],"deny":[]}},"allow-set-maximizable":{"identifier":"allow-set-maximizable","description":"Enables the set_maximizable command without any pre-configured scope.","commands":{"allow":["set_maximizable"],"deny":[]}},"allow-set-min-size":{"identifier":"allow-set-min-size","description":"Enables the set_min_size command without any pre-configured scope.","commands":{"allow":["set_min_size"],"deny":[]}},"allow-set-minimizable":{"identifier":"allow-set-minimizable","description":"Enables the set_minimizable command without any pre-configured scope.","commands":{"allow":["set_minimizable"],"deny":[]}},"allow-set-overlay-icon":{"identifier":"allow-set-overlay-icon","description":"Enables the set_overlay_icon command without any pre-configured scope.","commands":{"allow":["set_overlay_icon"],"deny":[]}},"allow-set-position":{"identifier":"allow-set-position","description":"Enables the set_position command without any pre-configured scope.","commands":{"allow":["set_position"],"deny":[]}},"allow-set-progress-bar":{"identifier":"allow-set-progress-bar","description":"Enables the set_progress_bar command without any pre-configured 
scope.","commands":{"allow":["set_progress_bar"],"deny":[]}},"allow-set-resizable":{"identifier":"allow-set-resizable","description":"Enables the set_resizable command without any pre-configured scope.","commands":{"allow":["set_resizable"],"deny":[]}},"allow-set-shadow":{"identifier":"allow-set-shadow","description":"Enables the set_shadow command without any pre-configured scope.","commands":{"allow":["set_shadow"],"deny":[]}},"allow-set-simple-fullscreen":{"identifier":"allow-set-simple-fullscreen","description":"Enables the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":["set_simple_fullscreen"],"deny":[]}},"allow-set-size":{"identifier":"allow-set-size","description":"Enables the set_size command without any pre-configured scope.","commands":{"allow":["set_size"],"deny":[]}},"allow-set-size-constraints":{"identifier":"allow-set-size-constraints","description":"Enables the set_size_constraints command without any pre-configured scope.","commands":{"allow":["set_size_constraints"],"deny":[]}},"allow-set-skip-taskbar":{"identifier":"allow-set-skip-taskbar","description":"Enables the set_skip_taskbar command without any pre-configured scope.","commands":{"allow":["set_skip_taskbar"],"deny":[]}},"allow-set-theme":{"identifier":"allow-set-theme","description":"Enables the set_theme command without any pre-configured scope.","commands":{"allow":["set_theme"],"deny":[]}},"allow-set-title":{"identifier":"allow-set-title","description":"Enables the set_title command without any pre-configured scope.","commands":{"allow":["set_title"],"deny":[]}},"allow-set-title-bar-style":{"identifier":"allow-set-title-bar-style","description":"Enables the set_title_bar_style command without any pre-configured scope.","commands":{"allow":["set_title_bar_style"],"deny":[]}},"allow-set-visible-on-all-workspaces":{"identifier":"allow-set-visible-on-all-workspaces","description":"Enables the set_visible_on_all_workspaces command without any 
pre-configured scope.","commands":{"allow":["set_visible_on_all_workspaces"],"deny":[]}},"allow-show":{"identifier":"allow-show","description":"Enables the show command without any pre-configured scope.","commands":{"allow":["show"],"deny":[]}},"allow-start-dragging":{"identifier":"allow-start-dragging","description":"Enables the start_dragging command without any pre-configured scope.","commands":{"allow":["start_dragging"],"deny":[]}},"allow-start-resize-dragging":{"identifier":"allow-start-resize-dragging","description":"Enables the start_resize_dragging command without any pre-configured scope.","commands":{"allow":["start_resize_dragging"],"deny":[]}},"allow-theme":{"identifier":"allow-theme","description":"Enables the theme command without any pre-configured scope.","commands":{"allow":["theme"],"deny":[]}},"allow-title":{"identifier":"allow-title","description":"Enables the title command without any pre-configured scope.","commands":{"allow":["title"],"deny":[]}},"allow-toggle-maximize":{"identifier":"allow-toggle-maximize","description":"Enables the toggle_maximize command without any pre-configured scope.","commands":{"allow":["toggle_maximize"],"deny":[]}},"allow-unmaximize":{"identifier":"allow-unmaximize","description":"Enables the unmaximize command without any pre-configured scope.","commands":{"allow":["unmaximize"],"deny":[]}},"allow-unminimize":{"identifier":"allow-unminimize","description":"Enables the unminimize command without any pre-configured scope.","commands":{"allow":["unminimize"],"deny":[]}},"deny-available-monitors":{"identifier":"deny-available-monitors","description":"Denies the available_monitors command without any pre-configured scope.","commands":{"allow":[],"deny":["available_monitors"]}},"deny-center":{"identifier":"deny-center","description":"Denies the center command without any pre-configured scope.","commands":{"allow":[],"deny":["center"]}},"deny-close":{"identifier":"deny-close","description":"Denies the close command 
without any pre-configured scope.","commands":{"allow":[],"deny":["close"]}},"deny-create":{"identifier":"deny-create","description":"Denies the create command without any pre-configured scope.","commands":{"allow":[],"deny":["create"]}},"deny-current-monitor":{"identifier":"deny-current-monitor","description":"Denies the current_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["current_monitor"]}},"deny-cursor-position":{"identifier":"deny-cursor-position","description":"Denies the cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["cursor_position"]}},"deny-destroy":{"identifier":"deny-destroy","description":"Denies the destroy command without any pre-configured scope.","commands":{"allow":[],"deny":["destroy"]}},"deny-get-all-windows":{"identifier":"deny-get-all-windows","description":"Denies the get_all_windows command without any pre-configured scope.","commands":{"allow":[],"deny":["get_all_windows"]}},"deny-hide":{"identifier":"deny-hide","description":"Denies the hide command without any pre-configured scope.","commands":{"allow":[],"deny":["hide"]}},"deny-inner-position":{"identifier":"deny-inner-position","description":"Denies the inner_position command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_position"]}},"deny-inner-size":{"identifier":"deny-inner-size","description":"Denies the inner_size command without any pre-configured scope.","commands":{"allow":[],"deny":["inner_size"]}},"deny-internal-toggle-maximize":{"identifier":"deny-internal-toggle-maximize","description":"Denies the internal_toggle_maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["internal_toggle_maximize"]}},"deny-is-always-on-top":{"identifier":"deny-is-always-on-top","description":"Denies the is_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["is_always_on_top"]}},"deny-is-closable":{"identifier":"deny-is-closable","description":"Denies the is_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_closable"]}},"deny-is-decorated":{"identifier":"deny-is-decorated","description":"Denies the is_decorated command without any pre-configured scope.","commands":{"allow":[],"deny":["is_decorated"]}},"deny-is-enabled":{"identifier":"deny-is-enabled","description":"Denies the is_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["is_enabled"]}},"deny-is-focused":{"identifier":"deny-is-focused","description":"Denies the is_focused command without any pre-configured scope.","commands":{"allow":[],"deny":["is_focused"]}},"deny-is-fullscreen":{"identifier":"deny-is-fullscreen","description":"Denies the is_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["is_fullscreen"]}},"deny-is-maximizable":{"identifier":"deny-is-maximizable","description":"Denies the is_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximizable"]}},"deny-is-maximized":{"identifier":"deny-is-maximized","description":"Denies the is_maximized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_maximized"]}},"deny-is-minimizable":{"identifier":"deny-is-minimizable","description":"Denies the is_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimizable"]}},"deny-is-minimized":{"identifier":"deny-is-minimized","description":"Denies the is_minimized command without any pre-configured scope.","commands":{"allow":[],"deny":["is_minimized"]}},"deny-is-resizable":{"identifier":"deny-is-resizable","description":"Denies the is_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["is_resizable"]}},"deny-is-visible":{"identifier":"deny-is-visible","description":"Denies the is_visible 
command without any pre-configured scope.","commands":{"allow":[],"deny":["is_visible"]}},"deny-maximize":{"identifier":"deny-maximize","description":"Denies the maximize command without any pre-configured scope.","commands":{"allow":[],"deny":["maximize"]}},"deny-minimize":{"identifier":"deny-minimize","description":"Denies the minimize command without any pre-configured scope.","commands":{"allow":[],"deny":["minimize"]}},"deny-monitor-from-point":{"identifier":"deny-monitor-from-point","description":"Denies the monitor_from_point command without any pre-configured scope.","commands":{"allow":[],"deny":["monitor_from_point"]}},"deny-outer-position":{"identifier":"deny-outer-position","description":"Denies the outer_position command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_position"]}},"deny-outer-size":{"identifier":"deny-outer-size","description":"Denies the outer_size command without any pre-configured scope.","commands":{"allow":[],"deny":["outer_size"]}},"deny-primary-monitor":{"identifier":"deny-primary-monitor","description":"Denies the primary_monitor command without any pre-configured scope.","commands":{"allow":[],"deny":["primary_monitor"]}},"deny-request-user-attention":{"identifier":"deny-request-user-attention","description":"Denies the request_user_attention command without any pre-configured scope.","commands":{"allow":[],"deny":["request_user_attention"]}},"deny-scale-factor":{"identifier":"deny-scale-factor","description":"Denies the scale_factor command without any pre-configured scope.","commands":{"allow":[],"deny":["scale_factor"]}},"deny-set-always-on-bottom":{"identifier":"deny-set-always-on-bottom","description":"Denies the set_always_on_bottom command without any pre-configured scope.","commands":{"allow":[],"deny":["set_always_on_bottom"]}},"deny-set-always-on-top":{"identifier":"deny-set-always-on-top","description":"Denies the set_always_on_top command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_always_on_top"]}},"deny-set-background-color":{"identifier":"deny-set-background-color","description":"Denies the set_background_color command without any pre-configured scope.","commands":{"allow":[],"deny":["set_background_color"]}},"deny-set-badge-count":{"identifier":"deny-set-badge-count","description":"Denies the set_badge_count command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_count"]}},"deny-set-badge-label":{"identifier":"deny-set-badge-label","description":"Denies the set_badge_label command without any pre-configured scope.","commands":{"allow":[],"deny":["set_badge_label"]}},"deny-set-closable":{"identifier":"deny-set-closable","description":"Denies the set_closable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_closable"]}},"deny-set-content-protected":{"identifier":"deny-set-content-protected","description":"Denies the set_content_protected command without any pre-configured scope.","commands":{"allow":[],"deny":["set_content_protected"]}},"deny-set-cursor-grab":{"identifier":"deny-set-cursor-grab","description":"Denies the set_cursor_grab command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_grab"]}},"deny-set-cursor-icon":{"identifier":"deny-set-cursor-icon","description":"Denies the set_cursor_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_icon"]}},"deny-set-cursor-position":{"identifier":"deny-set-cursor-position","description":"Denies the set_cursor_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_position"]}},"deny-set-cursor-visible":{"identifier":"deny-set-cursor-visible","description":"Denies the set_cursor_visible command without any pre-configured scope.","commands":{"allow":[],"deny":["set_cursor_visible"]}},"deny-set-decorations":{"identifier":"deny-set-decorations","description":"Denies the set_decorations 
command without any pre-configured scope.","commands":{"allow":[],"deny":["set_decorations"]}},"deny-set-effects":{"identifier":"deny-set-effects","description":"Denies the set_effects command without any pre-configured scope.","commands":{"allow":[],"deny":["set_effects"]}},"deny-set-enabled":{"identifier":"deny-set-enabled","description":"Denies the set_enabled command without any pre-configured scope.","commands":{"allow":[],"deny":["set_enabled"]}},"deny-set-focus":{"identifier":"deny-set-focus","description":"Denies the set_focus command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focus"]}},"deny-set-focusable":{"identifier":"deny-set-focusable","description":"Denies the set_focusable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_focusable"]}},"deny-set-fullscreen":{"identifier":"deny-set-fullscreen","description":"Denies the set_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_fullscreen"]}},"deny-set-icon":{"identifier":"deny-set-icon","description":"Denies the set_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_icon"]}},"deny-set-ignore-cursor-events":{"identifier":"deny-set-ignore-cursor-events","description":"Denies the set_ignore_cursor_events command without any pre-configured scope.","commands":{"allow":[],"deny":["set_ignore_cursor_events"]}},"deny-set-max-size":{"identifier":"deny-set-max-size","description":"Denies the set_max_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_max_size"]}},"deny-set-maximizable":{"identifier":"deny-set-maximizable","description":"Denies the set_maximizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_maximizable"]}},"deny-set-min-size":{"identifier":"deny-set-min-size","description":"Denies the set_min_size command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_min_size"]}},"deny-set-minimizable":{"identifier":"deny-set-minimizable","description":"Denies the set_minimizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_minimizable"]}},"deny-set-overlay-icon":{"identifier":"deny-set-overlay-icon","description":"Denies the set_overlay_icon command without any pre-configured scope.","commands":{"allow":[],"deny":["set_overlay_icon"]}},"deny-set-position":{"identifier":"deny-set-position","description":"Denies the set_position command without any pre-configured scope.","commands":{"allow":[],"deny":["set_position"]}},"deny-set-progress-bar":{"identifier":"deny-set-progress-bar","description":"Denies the set_progress_bar command without any pre-configured scope.","commands":{"allow":[],"deny":["set_progress_bar"]}},"deny-set-resizable":{"identifier":"deny-set-resizable","description":"Denies the set_resizable command without any pre-configured scope.","commands":{"allow":[],"deny":["set_resizable"]}},"deny-set-shadow":{"identifier":"deny-set-shadow","description":"Denies the set_shadow command without any pre-configured scope.","commands":{"allow":[],"deny":["set_shadow"]}},"deny-set-simple-fullscreen":{"identifier":"deny-set-simple-fullscreen","description":"Denies the set_simple_fullscreen command without any pre-configured scope.","commands":{"allow":[],"deny":["set_simple_fullscreen"]}},"deny-set-size":{"identifier":"deny-set-size","description":"Denies the set_size command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size"]}},"deny-set-size-constraints":{"identifier":"deny-set-size-constraints","description":"Denies the set_size_constraints command without any pre-configured scope.","commands":{"allow":[],"deny":["set_size_constraints"]}},"deny-set-skip-taskbar":{"identifier":"deny-set-skip-taskbar","description":"Denies the set_skip_taskbar command without any pre-configured 
scope.","commands":{"allow":[],"deny":["set_skip_taskbar"]}},"deny-set-theme":{"identifier":"deny-set-theme","description":"Denies the set_theme command without any pre-configured scope.","commands":{"allow":[],"deny":["set_theme"]}},"deny-set-title":{"identifier":"deny-set-title","description":"Denies the set_title command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title"]}},"deny-set-title-bar-style":{"identifier":"deny-set-title-bar-style","description":"Denies the set_title_bar_style command without any pre-configured scope.","commands":{"allow":[],"deny":["set_title_bar_style"]}},"deny-set-visible-on-all-workspaces":{"identifier":"deny-set-visible-on-all-workspaces","description":"Denies the set_visible_on_all_workspaces command without any pre-configured scope.","commands":{"allow":[],"deny":["set_visible_on_all_workspaces"]}},"deny-show":{"identifier":"deny-show","description":"Denies the show command without any pre-configured scope.","commands":{"allow":[],"deny":["show"]}},"deny-start-dragging":{"identifier":"deny-start-dragging","description":"Denies the start_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_dragging"]}},"deny-start-resize-dragging":{"identifier":"deny-start-resize-dragging","description":"Denies the start_resize_dragging command without any pre-configured scope.","commands":{"allow":[],"deny":["start_resize_dragging"]}},"deny-theme":{"identifier":"deny-theme","description":"Denies the theme command without any pre-configured scope.","commands":{"allow":[],"deny":["theme"]}},"deny-title":{"identifier":"deny-title","description":"Denies the title command without any pre-configured scope.","commands":{"allow":[],"deny":["title"]}},"deny-toggle-maximize":{"identifier":"deny-toggle-maximize","description":"Denies the toggle_maximize command without any pre-configured 
scope.","commands":{"allow":[],"deny":["toggle_maximize"]}},"deny-unmaximize":{"identifier":"deny-unmaximize","description":"Denies the unmaximize command without any pre-configured scope.","commands":{"allow":[],"deny":["unmaximize"]}},"deny-unminimize":{"identifier":"deny-unminimize","description":"Denies the unminimize command without any pre-configured scope.","commands":{"allow":[],"deny":["unminimize"]}}},"permission_sets":{},"global_scope_schema":null}} \ No newline at end of file diff --git a/web/src-tauri/gen/schemas/desktop-schema.json b/web/src-tauri/gen/schemas/desktop-schema.json index 18fd3095..260dbe05 100644 --- a/web/src-tauri/gen/schemas/desktop-schema.json +++ b/web/src-tauri/gen/schemas/desktop-schema.json @@ -183,10 +183,10 @@ "markdownDescription": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`" }, { - "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`", + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`", "type": "string", "const": "core:app:default", - "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`" + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- 
`allow-register-listener`\n- `allow-remove-listener`" }, { "description": "Enables the app_hide command without any pre-configured scope.", @@ -200,6 +200,12 @@ "const": "core:app:allow-app-show", "markdownDescription": "Enables the app_show command without any pre-configured scope." }, + { + "description": "Enables the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-bundle-type", + "markdownDescription": "Enables the bundle_type command without any pre-configured scope." + }, { "description": "Enables the default_window_icon command without any pre-configured scope.", "type": "string", @@ -224,12 +230,24 @@ "const": "core:app:allow-name", "markdownDescription": "Enables the name command without any pre-configured scope." }, + { + "description": "Enables the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-register-listener", + "markdownDescription": "Enables the register_listener command without any pre-configured scope." + }, { "description": "Enables the remove_data_store command without any pre-configured scope.", "type": "string", "const": "core:app:allow-remove-data-store", "markdownDescription": "Enables the remove_data_store command without any pre-configured scope." }, + { + "description": "Enables the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-listener", + "markdownDescription": "Enables the remove_listener command without any pre-configured scope." + }, { "description": "Enables the set_app_theme command without any pre-configured scope.", "type": "string", @@ -266,6 +284,12 @@ "const": "core:app:deny-app-show", "markdownDescription": "Denies the app_show command without any pre-configured scope." 
}, + { + "description": "Denies the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-bundle-type", + "markdownDescription": "Denies the bundle_type command without any pre-configured scope." + }, { "description": "Denies the default_window_icon command without any pre-configured scope.", "type": "string", @@ -290,12 +314,24 @@ "const": "core:app:deny-name", "markdownDescription": "Denies the name command without any pre-configured scope." }, + { + "description": "Denies the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-register-listener", + "markdownDescription": "Denies the register_listener command without any pre-configured scope." + }, { "description": "Denies the remove_data_store command without any pre-configured scope.", "type": "string", "const": "core:app:deny-remove-data-store", "markdownDescription": "Denies the remove_data_store command without any pre-configured scope." }, + { + "description": "Denies the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-listener", + "markdownDescription": "Denies the remove_listener command without any pre-configured scope." + }, { "description": "Denies the set_app_theme command without any pre-configured scope.", "type": "string", @@ -1478,6 +1514,12 @@ "const": "core:window:allow-set-focus", "markdownDescription": "Enables the set_focus command without any pre-configured scope." }, + { + "description": "Enables the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focusable", + "markdownDescription": "Enables the set_focusable command without any pre-configured scope." 
+ }, { "description": "Enables the set_fullscreen command without any pre-configured scope.", "type": "string", @@ -1550,6 +1592,12 @@ "const": "core:window:allow-set-shadow", "markdownDescription": "Enables the set_shadow command without any pre-configured scope." }, + { + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-simple-fullscreen", + "markdownDescription": "Enables the set_simple_fullscreen command without any pre-configured scope." + }, { "description": "Enables the set_size command without any pre-configured scope.", "type": "string", @@ -1922,6 +1970,12 @@ "const": "core:window:deny-set-focus", "markdownDescription": "Denies the set_focus command without any pre-configured scope." }, + { + "description": "Denies the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focusable", + "markdownDescription": "Denies the set_focusable command without any pre-configured scope." + }, { "description": "Denies the set_fullscreen command without any pre-configured scope.", "type": "string", @@ -1994,6 +2048,12 @@ "const": "core:window:deny-set-shadow", "markdownDescription": "Denies the set_shadow command without any pre-configured scope." }, + { + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-simple-fullscreen", + "markdownDescription": "Denies the set_simple_fullscreen command without any pre-configured scope." 
+ }, { "description": "Denies the set_size command without any pre-configured scope.", "type": "string", diff --git a/web/src-tauri/gen/schemas/linux-schema.json b/web/src-tauri/gen/schemas/linux-schema.json index 18fd3095..260dbe05 100644 --- a/web/src-tauri/gen/schemas/linux-schema.json +++ b/web/src-tauri/gen/schemas/linux-schema.json @@ -183,10 +183,10 @@ "markdownDescription": "Default core plugins set.\n#### This default permission set includes:\n\n- `core:path:default`\n- `core:event:default`\n- `core:window:default`\n- `core:webview:default`\n- `core:app:default`\n- `core:image:default`\n- `core:resources:default`\n- `core:menu:default`\n- `core:tray:default`" }, { - "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`", + "description": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`", "type": "string", "const": "core:app:default", - "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`" + "markdownDescription": "Default permissions for the plugin.\n#### This default permission set includes:\n\n- `allow-version`\n- `allow-name`\n- `allow-tauri-version`\n- `allow-identifier`\n- `allow-bundle-type`\n- `allow-register-listener`\n- `allow-remove-listener`" }, { "description": "Enables the app_hide command without any pre-configured scope.", @@ -200,6 +200,12 @@ "const": "core:app:allow-app-show", "markdownDescription": "Enables the app_show command without any pre-configured scope." 
}, + { + "description": "Enables the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-bundle-type", + "markdownDescription": "Enables the bundle_type command without any pre-configured scope." + }, { "description": "Enables the default_window_icon command without any pre-configured scope.", "type": "string", @@ -224,12 +230,24 @@ "const": "core:app:allow-name", "markdownDescription": "Enables the name command without any pre-configured scope." }, + { + "description": "Enables the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-register-listener", + "markdownDescription": "Enables the register_listener command without any pre-configured scope." + }, { "description": "Enables the remove_data_store command without any pre-configured scope.", "type": "string", "const": "core:app:allow-remove-data-store", "markdownDescription": "Enables the remove_data_store command without any pre-configured scope." }, + { + "description": "Enables the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:allow-remove-listener", + "markdownDescription": "Enables the remove_listener command without any pre-configured scope." + }, { "description": "Enables the set_app_theme command without any pre-configured scope.", "type": "string", @@ -266,6 +284,12 @@ "const": "core:app:deny-app-show", "markdownDescription": "Denies the app_show command without any pre-configured scope." }, + { + "description": "Denies the bundle_type command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-bundle-type", + "markdownDescription": "Denies the bundle_type command without any pre-configured scope." 
+ }, { "description": "Denies the default_window_icon command without any pre-configured scope.", "type": "string", @@ -290,12 +314,24 @@ "const": "core:app:deny-name", "markdownDescription": "Denies the name command without any pre-configured scope." }, + { + "description": "Denies the register_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-register-listener", + "markdownDescription": "Denies the register_listener command without any pre-configured scope." + }, { "description": "Denies the remove_data_store command without any pre-configured scope.", "type": "string", "const": "core:app:deny-remove-data-store", "markdownDescription": "Denies the remove_data_store command without any pre-configured scope." }, + { + "description": "Denies the remove_listener command without any pre-configured scope.", + "type": "string", + "const": "core:app:deny-remove-listener", + "markdownDescription": "Denies the remove_listener command without any pre-configured scope." + }, { "description": "Denies the set_app_theme command without any pre-configured scope.", "type": "string", @@ -1478,6 +1514,12 @@ "const": "core:window:allow-set-focus", "markdownDescription": "Enables the set_focus command without any pre-configured scope." }, + { + "description": "Enables the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-focusable", + "markdownDescription": "Enables the set_focusable command without any pre-configured scope." + }, { "description": "Enables the set_fullscreen command without any pre-configured scope.", "type": "string", @@ -1550,6 +1592,12 @@ "const": "core:window:allow-set-shadow", "markdownDescription": "Enables the set_shadow command without any pre-configured scope." 
}, + { + "description": "Enables the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:allow-set-simple-fullscreen", + "markdownDescription": "Enables the set_simple_fullscreen command without any pre-configured scope." + }, { "description": "Enables the set_size command without any pre-configured scope.", "type": "string", @@ -1922,6 +1970,12 @@ "const": "core:window:deny-set-focus", "markdownDescription": "Denies the set_focus command without any pre-configured scope." }, + { + "description": "Denies the set_focusable command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-focusable", + "markdownDescription": "Denies the set_focusable command without any pre-configured scope." + }, { "description": "Denies the set_fullscreen command without any pre-configured scope.", "type": "string", @@ -1994,6 +2048,12 @@ "const": "core:window:deny-set-shadow", "markdownDescription": "Denies the set_shadow command without any pre-configured scope." }, + { + "description": "Denies the set_simple_fullscreen command without any pre-configured scope.", + "type": "string", + "const": "core:window:deny-set-simple-fullscreen", + "markdownDescription": "Denies the set_simple_fullscreen command without any pre-configured scope." 
+ }, { "description": "Denies the set_size command without any pre-configured scope.", "type": "string", diff --git a/web/src-tauri/src/lib.rs b/web/src-tauri/src/lib.rs index 417a183e..e2423297 100644 --- a/web/src-tauri/src/lib.rs +++ b/web/src-tauri/src/lib.rs @@ -2,7 +2,8 @@ #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] use directories::ProjectDirs; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize}; +use std::collections::{HashMap, HashSet}; use std::fs; use std::fs::File; use std::fs::OpenOptions; @@ -11,6 +12,51 @@ use std::io::Write; use std::path::PathBuf; use tauri::command; +fn deserialize_categories<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + use serde::de::{self, Visitor}; + use std::fmt; + + struct CategoriesVisitor; + + impl<'de> Visitor<'de> for CategoriesVisitor { + type Value = HashMap; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a string or a map") + } + + fn visit_str(self, value: &str) -> Result + where + E: de::Error, + { + // Convert comma-separated string to HashMap + let mut map = HashMap::new(); + if !value.is_empty() && value != "{}" { + for (i, category) in value.split(',').enumerate() { + map.insert(i.to_string(), category.trim().to_string()); + } + } + Ok(map) + } + + fn visit_map(self, mut map: M) -> Result + where + M: de::MapAccess<'de>, + { + let mut categories = HashMap::new(); + while let Some((key, value)) = map.next_entry()? 
{ + categories.insert(key, value); + } + Ok(categories) + } + } + + deserializer.deserialize_any(CategoriesVisitor) +} + // Define the structure for the file entries #[derive(Serialize, Deserialize)] struct FileEntry { @@ -150,7 +196,10 @@ async fn update_local_db(mut episode_info: EpisodeInfo) -> Result<(), String> { Vec::new() }; - episodes.push(episode_info); + // Check if episode already exists before adding + if !episodes.iter().any(|ep| ep.episodeid == episode_info.episodeid) { + episodes.push(episode_info); + } let file = OpenOptions::new() .write(true) @@ -163,6 +212,51 @@ async fn update_local_db(mut episode_info: EpisodeInfo) -> Result<(), String> { Ok(()) } +#[command] +async fn remove_multiple_from_local_db(episode_ids: Vec) -> Result<(), String> { + let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?; + let db_path = proj_dirs.data_dir().join("local_episodes.json"); + + let mut episodes = if db_path.exists() { + let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?; + serde_json::from_str::>(&data).map_err(|e| e.to_string())? 
+ } else { + return Ok(()); // No episodes to remove if file doesn't exist + }; + + // Remove episodes with matching IDs + episodes.retain(|episode| !episode_ids.contains(&episode.episodeid)); + + // Write updated episodes back to file + let file = OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&db_path) + .map_err(|e| e.to_string())?; + serde_json::to_writer(file, &episodes).map_err(|e| e.to_string())?; + + // Delete the audio files and artwork for each episode + for episodeid in episode_ids { + let audio_file_path = proj_dirs + .data_dir() + .join(format!("episode_{}.mp3", episodeid)); + let artwork_file_path = proj_dirs + .data_dir() + .join(format!("artwork_{}.jpg", episodeid)); + + if audio_file_path.exists() { + std::fs::remove_file(audio_file_path).map_err(|e| e.to_string())?; + } + + if artwork_file_path.exists() { + std::fs::remove_file(artwork_file_path).map_err(|e| e.to_string())?; + } + } + + Ok(()) +} + #[command] async fn remove_from_local_db(episodeid: i32) -> Result<(), String> { let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?; @@ -204,6 +298,47 @@ async fn remove_from_local_db(episodeid: i32) -> Result<(), String> { Ok(()) } +#[command] +async fn deduplicate_local_episodes() -> Result<(), String> { + let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?; + let db_path = proj_dirs.data_dir().join("local_episodes.json"); + + if !db_path.exists() { + return Ok(()); + } + + let data = std::fs::read_to_string(&db_path).map_err(|e| e.to_string())?; + let episodes = match serde_json::from_str::>(&data) { + Ok(eps) => eps, + Err(e) => { + println!("JSON parsing error: {}, resetting file", e); + std::fs::write(&db_path, "[]").map_err(|e| e.to_string())?; + return Ok(()); + } + }; + + // Remove duplicates based on episodeid + let mut unique_episodes = Vec::new(); + let mut seen_ids = HashSet::new(); + + for episode in episodes { + if seen_ids.insert(episode.episodeid) { + unique_episodes.push(episode); + } 
+ } + + // Write back the deduplicated episodes + let file = OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&db_path) + .map_err(|e| e.to_string())?; + serde_json::to_writer(file, &unique_episodes).map_err(|e| e.to_string())?; + + Ok(()) +} + #[command] async fn get_local_episodes() -> Result, String> { let proj_dirs = get_project_dirs().map_err(|e| e.to_string())?; @@ -287,7 +422,8 @@ pub struct PodcastDetails { pub podcastindexid: Option, pub artworkurl: String, pub author: String, - pub categories: String, + #[serde(deserialize_with = "deserialize_categories")] + pub categories: HashMap, pub description: String, pub episodecount: i32, pub explicit: bool, @@ -339,7 +475,8 @@ pub struct Podcast { pub websiteurl: Option, pub feedurl: String, pub author: Option, - pub categories: String, // Keeping as String since it's handled as empty string "{}" or "{}" + #[serde(deserialize_with = "deserialize_categories")] + pub categories: HashMap, pub explicit: bool, // pub is_youtube: bool, } @@ -412,9 +549,11 @@ pub fn run() { delete_file, update_local_db, remove_from_local_db, + remove_multiple_from_local_db, update_podcast_db, get_local_podcasts, get_local_episodes, + deduplicate_local_episodes, list_app_files, get_local_file, start_file_server diff --git a/web/src-tauri/tauri.conf.json b/web/src-tauri/tauri.conf.json index ce44aa6b..dfa3ebdb 100644 --- a/web/src-tauri/tauri.conf.json +++ b/web/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "build": { - "beforeBuildCommand": "trunk build", + "beforeBuildCommand": "", "beforeDevCommand": "RUSTFLAGS='--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"' trunk serve", "devUrl": "http://localhost:8080", "frontendDist": "../dist" diff --git a/web/src/components/app_drawer.rs b/web/src/components/app_drawer.rs index 36517468..d8a8deef 100644 --- a/web/src/components/app_drawer.rs +++ b/web/src/components/app_drawer.rs @@ -1,7 +1,8 @@ -use super::routes::Route; -use 
crate::components::context::AppState; +use crate::components::context::{AppState, UserStatsStore}; use crate::components::navigation::use_back_button; -use crate::requests::pod_req::connect_to_episode_websocket; +use crate::pages::routes::Route; +use crate::requests::pod_req::{call_get_pinepods_version, connect_to_episode_websocket}; +use i18nrs::yew::use_translation; use wasm_bindgen_futures::spawn_local; use web_sys::window; use yew::prelude::*; @@ -30,9 +31,29 @@ pub fn back_button() -> Html { #[allow(non_camel_case_types)] #[function_component(App_drawer)] pub fn app_drawer() -> Html { + let (i18n, _) = use_translation(); + let (stats_state, stats_dispatch) = use_store::(); // let selection = use_state(|| "".to_string()); // let (state, _dispatch) = use_store::(); + // Capture i18n strings before they get moved + let i18n_local_downloads = i18n.t("app_drawer.local_downloads").to_string(); + let i18n_pinepods = i18n.t("app_drawer.pinepods").to_string(); + let i18n_home = i18n.t("navigation.home").to_string(); + let i18n_feed = i18n.t("app_drawer.feed").to_string(); + let i18n_search_podcasts = i18n.t("app_drawer.search_podcasts").to_string(); + let i18n_queue = i18n.t("navigation.queue").to_string(); + let i18n_saved = i18n.t("navigation.saved").to_string(); + let i18n_playlists = i18n.t("navigation.playlists").to_string(); + let i18n_history = i18n.t("navigation.history").to_string(); + let i18n_server_downloads = i18n.t("app_drawer.server_downloads").to_string(); + let i18n_local_downloads = i18n.t("app_drawer.local_downloads").to_string(); + let i18n_subscribed_people = i18n.t("app_drawer.subscribed_people").to_string(); + let i18n_podcasts = i18n.t("navigation.podcasts").to_string(); + let i18n_settings = i18n.t("app_drawer.settings").to_string(); + let i18n_sign_out = i18n.t("app_drawer.sign_out").to_string(); + let i18n_loading = i18n.t("common.loading").to_string(); + let is_drawer_open = use_state(|| false); let drawer_rotation = if *is_drawer_open { 
"rotate-90 transform" @@ -50,6 +71,31 @@ pub fn app_drawer() -> Html { .auth_details .as_ref() .map(|ud| ud.server_name.clone()); + + // Fetch version on component mount if authenticated + { + let stats_dispatch = stats_dispatch.clone(); + let server_name_version = server_name.clone(); + let api_key_version = api_key.clone(); + + use_effect_with((api_key.clone(), server_name.clone()), move |_| { + if let (Some(api_key), Some(server_name)) = + (api_key_version.clone(), server_name_version.clone()) + { + let stats_dispatch = stats_dispatch.clone(); + wasm_bindgen_futures::spawn_local(async move { + if let Ok(version) = + call_get_pinepods_version(server_name.clone(), &api_key).await + { + stats_dispatch.reduce_mut(move |state| { + state.pinepods_version = Some(version); + }); + } + }); + } + || () + }); + } // let session_state = state.clone(); let username = state .user_details @@ -154,7 +200,7 @@ pub fn app_drawer() -> Html { to={Route::LocalDownloads}>
- {"Local Downloads"} + {&i18n_local_downloads}
> @@ -167,15 +213,15 @@ pub fn app_drawer() -> Html {
// Drawer
-
-
+
+
Pinepods Logo -

{"Pinepods"}

+

{&i18n_pinepods}


@@ -202,7 +248,7 @@ pub fn app_drawer() -> Html { to={Route::Home}>
- {"Home"} + {&i18n_home}
>
@@ -212,7 +258,7 @@ pub fn app_drawer() -> Html { to={Route::Feed}>
- {"Feed"} + {&i18n_feed}
>
@@ -222,7 +268,7 @@ pub fn app_drawer() -> Html { to={Route::Search}>
- {"Search Podcasts"} + {&i18n_search_podcasts}
>
@@ -232,7 +278,7 @@ pub fn app_drawer() -> Html { to={Route::Queue}>
- {"Queue"} + {&i18n_queue}
>
@@ -242,7 +288,7 @@ pub fn app_drawer() -> Html { to={Route::Saved}>
- {"Saved"} + {&i18n_saved}
>
@@ -252,7 +298,7 @@ pub fn app_drawer() -> Html { to={Route::Playlists}>
- {"Playlists"} + {&i18n_playlists}
>
@@ -262,7 +308,7 @@ pub fn app_drawer() -> Html { to={Route::PodHistory}>
- {"History"} + {&i18n_history}
>
@@ -272,7 +318,7 @@ pub fn app_drawer() -> Html { to={Route::Downloads}>
- {"Server Downloads"} + {&i18n_server_downloads}
> @@ -287,7 +333,7 @@ pub fn app_drawer() -> Html { to={Route::SubscribedPeople}>
- {"Subscribed People"} + {&i18n_subscribed_people}
> @@ -297,7 +343,7 @@ pub fn app_drawer() -> Html { to={Route::Podcasts}>
- {"Podcasts"} + {&i18n_podcasts}
> @@ -307,7 +353,7 @@ pub fn app_drawer() -> Html { to={Route::Settings}>
- {"Settings"} + {&i18n_settings}
> @@ -320,7 +366,7 @@ pub fn app_drawer() -> Html { to={Route::LogOut}>
- {"Sign Out"} + {&i18n_sign_out}
> @@ -328,6 +374,20 @@ pub fn app_drawer() -> Html { + // Version display at bottom of drawer + { + if let Some(version) = &stats_state.pinepods_version { + html! { +
+
+ { format!("v{}", version) } +
+
+ } + } else { + html! {} + } + } @@ -391,7 +451,7 @@ pub fn app_drawer() -> Html { - {"Loading..."} + {&i18n_loading} }, _ => html! {}, // Covers both Some(false) and None diff --git a/web/src/components/audio.rs b/web/src/components/audio.rs index 140a5a77..6a24c2a3 100644 --- a/web/src/components/audio.rs +++ b/web/src/components/audio.rs @@ -1,22 +1,21 @@ use crate::components::context::{AppState, UIState}; -#[cfg(not(feature = "server_build"))] -use crate::components::downloads_tauri::start_local_file_server; use crate::components::gen_components::{EpisodeModal, FallbackImage}; use crate::components::gen_funcs::format_time_rm_hour; #[cfg(not(feature = "server_build"))] -use crate::requests::pod_req::EpisodeDownload; +use crate::pages::downloads_tauri::start_local_file_server; +use crate::requests::episode::Episode; +use crate::requests::pod_req::call_get_episode_id; use crate::requests::pod_req::FetchPodcasting2DataRequest; use crate::requests::pod_req::{ call_add_history, call_check_episode_in_db, call_fetch_podcasting_2_data, - call_get_auto_skip_times, call_get_episode_id, call_get_play_episode_details, - call_get_podcast_id_from_ep, call_get_queued_episodes, call_increment_listen_time, - call_increment_played, call_mark_episode_completed, call_queue_episode, - call_record_listen_duration, call_remove_queued_episode, HistoryAddRequest, - MarkEpisodeCompletedRequest, QueuePodcastRequest, RecordListenDurationRequest, + call_get_play_episode_details, call_get_podcast_id_from_ep, call_get_queued_episodes, + call_increment_listen_time, call_increment_played, call_mark_episode_completed, + call_queue_episode, call_record_listen_duration, call_remove_queued_episode, + call_update_episode_duration, HistoryAddRequest, MarkEpisodeCompletedRequest, + QueuePodcastRequest, RecordListenDurationRequest, UpdateEpisodeDurationRequest, }; use gloo_timers::callback::Interval; -use js_sys::Array; -use js_sys::Object; +use i18nrs::yew::use_translation; use std::cell::Cell; 
#[cfg(not(feature = "server_build"))] use std::path::Path; @@ -26,10 +25,7 @@ use wasm_bindgen::closure::Closure; use wasm_bindgen::JsCast; use wasm_bindgen::JsValue; use wasm_bindgen_futures::spawn_local; -use web_sys::{ - window, HtmlAudioElement, HtmlElement, HtmlInputElement, MediaPositionState, - MediaSessionPlaybackState, Navigator, TouchEvent, -}; +use web_sys::{window, HtmlAudioElement, HtmlElement, HtmlInputElement, TouchEvent}; use yew::prelude::*; use yew::{function_component, html, Callback, Html}; use yew_router::history::{BrowserHistory, History}; @@ -37,6 +33,7 @@ use yewdux::prelude::*; #[derive(Properties, PartialEq, Debug, Clone)] pub struct AudioPlayerProps { + pub episode: Episode, pub src: String, pub title: String, pub description: String, @@ -163,10 +160,19 @@ pub fn volume_control(props: &VolumeControlProps) -> Html { #[function_component(AudioPlayer)] pub fn audio_player(props: &AudioPlayerProps) -> Html { + let (i18n, _) = use_translation(); let audio_ref = use_node_ref(); let (state, _dispatch) = use_store::(); let (audio_state, _audio_dispatch) = use_store::(); let show_modal = use_state(|| false); + + // Capture i18n strings before they get moved + let i18n_chapters = i18n.t("audio.chapters").to_string(); + let i18n_close_modal = i18n.t("common.close_modal").to_string(); + let i18n_no_audio_playing = i18n.t("audio.no_audio_playing").to_string(); + let i18n_no_chapters_available = i18n.t("audio.no_chapters_available").to_string(); + let i18n_shownotes = i18n.t("audio.shownotes").to_string(); + let i18n_shownotes_unavailable = i18n.t("audio.shownotes_unavailable").to_string(); let on_modal_close = { let show_modal = show_modal.clone(); Callback::from(move |_: MouseEvent| show_modal.set(false)) @@ -791,7 +797,29 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { // if let Some(audio_element) = audio_ref.cast::() { // Clone all necessary data to be used inside the closure to avoid FnOnce limitation. 
+ // Flag to prevent processing the same ended event multiple times + let processing_ended = Rc::new(Cell::new(false)); + let processing_ended_clone = processing_ended.clone(); + + // Flag to prevent processing the same ended event multiple times + let processing_ended = Rc::new(Cell::new(false)); + let processing_ended_clone = processing_ended.clone(); + let ended_closure = Closure::wrap(Box::new(move || { + web_sys::console::log_1(&"Episode ended event fired".into()); + + // Check if we're already processing an ended event + if processing_ended_clone.get() { + web_sys::console::log_1( + &"Already processing ended event, skipping duplicate".into(), + ); + return; + } + + // Set flag to indicate we're processing + processing_ended_clone.set(true); + + let processing_flag_for_reset = processing_ended_clone.clone(); let server_name = server_name.clone(); let api_key = api_key.clone(); let user_id = user_id.clone(); @@ -802,8 +830,14 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { // Closure::wrap(Box::new(move |_| { if offline_status_loop { // If offline, do not perform any action + web_sys::console::log_1( + &"Offline mode - skipping queue advancement".into(), + ); + processing_flag_for_reset.set(false); } else { wasm_bindgen_futures::spawn_local(async move { + web_sys::console::log_1(&"Fetching queued episodes...".into()); + web_sys::console::log_1(&"Fetching queued episodes...".into()); let queued_episodes_result = call_get_queued_episodes( &server_name.clone().unwrap(), &api_key.clone().unwrap(), @@ -812,64 +846,110 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { .await; match queued_episodes_result { Ok(episodes) => { - if let Some(current_episode) = episodes - .iter() - .find(|ep| ep.episodeid == current_episode_id.unwrap()) - { - let current_queue_position = - current_episode.queueposition.unwrap_or_default(); - // Remove the currently playing episode from the queue - let request = QueuePodcastRequest { - episode_id: 
current_episode_id.clone().unwrap(), - user_id: user_id.clone().unwrap(), // replace with the actual user ID - is_youtube: current_episode.is_youtube, - }; - let remove_result = call_remove_queued_episode( - &server_name.clone().unwrap(), - &api_key.clone().unwrap(), - &request, - ) - .await; - match remove_result { - Ok(_) => { - // web_sys::console::log_1(&"Successfully removed episode from queue".into()); - } - Err(_e) => { - // web_sys::console::log_1(&format!("Failed to remove episode from queue: {:?}", e).into()); + web_sys::console::log_1( + &format!("Found {} episodes in queue", episodes.len()) + .into(), + ); + + // If queue is empty, just stop playback + if episodes.is_empty() { + web_sys::console::log_1( + &"Queue is empty, stopping playback".into(), + ); + audio_dispatch.reduce_mut(|state| { + state.audio_playing = Some(false); + }); + } else { + // Try to find current episode first to remove it properly + if let Some(current_episode) = episodes + .iter() + .find(|ep| ep.episodeid == current_episode_id.unwrap()) + { + web_sys::console::log_1(&format!("Found current episode in queue (ID: {}), removing it", current_episode.episodeid).into()); + // Remove the currently playing episode from the queue + let request = QueuePodcastRequest { + episode_id: current_episode_id.clone().unwrap(), + user_id: user_id.clone().unwrap(), + is_youtube: current_episode.is_youtube, + }; + let remove_result = call_remove_queued_episode( + &server_name.clone().unwrap(), + &api_key.clone().unwrap(), + &request, + ) + .await; + match remove_result { + Ok(_) => { + web_sys::console::log_1(&"Successfully removed current episode from queue".into()); + } + Err(e) => { + web_sys::console::log_1(&format!("Failed to remove episode from queue: {:?}", e).into()); + } } + } else { + web_sys::console::log_1(&"Current episode not found in queue (likely already removed)".into()); } - if let Some(next_episode) = episodes.iter().find(|ep| { - ep.queueposition == Some(current_queue_position + 
1) - }) { - on_play_click( - next_episode.episodeurl.clone(), - next_episode.episodetitle.clone(), - next_episode.episodedescription.clone(), - next_episode.episodepubdate.clone(), - next_episode.episodeartwork.clone(), - next_episode.episodeduration, + + // Now play the first episode in the queue (which is the next one to play) + // Sort by queue position to ensure we get the right one + let mut sorted_episodes = episodes.clone(); + sorted_episodes + .sort_by_key(|ep| ep.queueposition.unwrap_or(999999)); + + if let Some(next_episode) = sorted_episodes.first() { + web_sys::console::log_1(&format!("Playing first episode in queue: {} (ID: {}, Position: {})", + next_episode.episodetitle, next_episode.episodeid, - next_episode.listenduration, - api_key.clone().unwrap().unwrap(), - user_id.unwrap(), - server_name.clone().unwrap(), - audio_dispatch.clone(), - audio_state.clone(), - None, - Some(next_episode.is_youtube.clone()), - ) - .emit(MouseEvent::new("click").unwrap()); + next_episode.queueposition.unwrap_or(0) + ).into()); + + // Check if we have the required data + if let ( + Some(Some(api_key_val)), + Some(user_id_val), + Some(server_name_val), + ) = (api_key.clone(), user_id, server_name.clone()) + { + web_sys::console::log_1( + &"Calling on_play_click for next episode" + .into(), + ); + on_play_click( + next_episode.clone(), + api_key_val, + user_id_val, + server_name_val, + audio_dispatch.clone(), + audio_state.clone(), + false, + ) + .emit(MouseEvent::new("click").unwrap()); + web_sys::console::log_1(&"Successfully emitted play click for next episode".into()); + } else { + web_sys::console::log_1(&"ERROR: Missing required auth data (api_key, user_id, or server_name)".into()); + } } else { + web_sys::console::log_1( + &"No episodes found in queue after sorting".into(), + ); audio_dispatch.reduce_mut(|state| { state.audio_playing = Some(false); }); } } } - Err(_e) => { - // web_sys::console::log_1(&format!("Failed to fetch queued episodes: {:?}", e).into()); + 
Err(e) => { + web_sys::console::log_1( + &format!("Failed to fetch queued episodes: {:?}", e).into(), + ); } } + + // Reset the processing flag after all async work is complete + processing_flag_for_reset.set(false); + web_sys::console::log_1( + &"Queue processing complete, flag reset".into(), + ); }); } // }) as Box); @@ -883,168 +963,6 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } }); - { - let audio_state = audio_state.clone(); - let audio_dispatch = _audio_dispatch.clone(); - let audio_state_clone = audio_state.clone(); - - use_effect_with(audio_state.clone(), move |_| { - if let Some(window) = web_sys::window() { - let navigator: Navigator = window.navigator(); - - // Try to get media session - if let Ok(media_session) = - js_sys::Reflect::get(&navigator, &JsValue::from_str("mediaSession")) - { - // Safely attempt to convert to MediaSession - if let Ok(media_session) = media_session.dyn_into::() { - // Update metadata if we have something playing - if let Some(audio_props) = &audio_state.currently_playing { - // Try to create new metadata - if let Ok(metadata) = web_sys::MediaMetadata::new() { - metadata.set_title(&audio_props.title); - - // Create artwork array - let artwork_array = Array::new(); - let artwork_object = Object::new(); - - // Set up artwork properties - let _ = js_sys::Reflect::set( - &artwork_object, - &"src".into(), - &audio_props.artwork_url.clone().into(), - ); - let _ = js_sys::Reflect::set( - &artwork_object, - &"sizes".into(), - &"512x512".into(), - ); - let _ = js_sys::Reflect::set( - &artwork_object, - &"type".into(), - &"image/jpeg".into(), - ); - - artwork_array.push(&artwork_object); - metadata.set_artwork(&artwork_array.into()); - media_session.set_metadata(Some(&metadata)); - - // Set playback state - if audio_state_clone.audio_playing.unwrap() { - media_session - .set_playback_state(MediaSessionPlaybackState::Playing); - } else { - media_session - .set_playback_state(MediaSessionPlaybackState::Paused); - } - - // 
Update position state - if let Some(audio_element) = &audio_state_clone.audio_element { - let duration = audio_props.duration_sec; - if !duration.is_nan() && duration > 0.0 { - let position_state = MediaPositionState::new(); - position_state.set_duration(duration); - position_state - .set_playback_rate(audio_state_clone.playback_speed); - position_state.set_position(audio_element.current_time()); - let _ = media_session - .set_position_state_with_state(&position_state); - } - } - } - // Inside your use_effect_with block, after setting up the initial position state: - // Inside your media session setup use_effect: - if let Some(audio_element) = &audio_state_clone.audio_element { - let media_session_clone = media_session.clone(); - let audio_state_for_callback = audio_state_clone.clone(); - let audio_element_clone = audio_element.clone(); - let timeupdate_callback = Closure::wrap(Box::new(move || { - let duration = audio_element_clone.duration(); - // Only update position state if we have a valid duration - if !duration.is_nan() && duration > 0.0 { - let position_state = MediaPositionState::new(); - position_state.set_duration(duration); - position_state.set_playback_rate( - audio_state_for_callback.playback_speed, - ); - position_state - .set_position(audio_element_clone.current_time()); - let _ = media_session_clone - .set_position_state_with_state(&position_state); - } - }) - as Box); - - audio_element.set_ontimeupdate(Some( - timeupdate_callback.as_ref().unchecked_ref(), - )); - timeupdate_callback.forget(); - } - } - - // Set up action handlers - let audio_dispatch_play = audio_dispatch.clone(); - let play_pause_callback = Closure::wrap(Box::new(move || { - audio_dispatch_play.reduce_mut(UIState::toggle_playback); - }) - as Box); - - // Set play/pause handlers - let _ = media_session.set_action_handler( - web_sys::MediaSessionAction::Play, - Some(play_pause_callback.as_ref().unchecked_ref()), - ); - let _ = media_session.set_action_handler( - 
web_sys::MediaSessionAction::Pause, - Some(play_pause_callback.as_ref().unchecked_ref()), - ); - play_pause_callback.forget(); - - // Set up seek backward handler - let audio_state_back = audio_state.clone(); - let audio_dispatch_back = audio_dispatch.clone(); - let seek_backward_callback = Closure::wrap(Box::new(move || { - if let Some(audio_element) = audio_state_back.audio_element.as_ref() { - let new_time = audio_element.current_time() - 15.0; - let _ = audio_element.set_current_time(new_time); - audio_dispatch_back - .reduce_mut(|state| state.update_current_time(new_time)); - } - }) - as Box); - - let _ = media_session.set_action_handler( - web_sys::MediaSessionAction::Seekbackward, - Some(seek_backward_callback.as_ref().unchecked_ref()), - ); - seek_backward_callback.forget(); - - // Set up seek forward handler - let audio_state_fwd = audio_state.clone(); - let audio_dispatch_fwd = audio_dispatch.clone(); - let seek_forward_callback = Closure::wrap(Box::new(move || { - if let Some(audio_element) = audio_state_fwd.audio_element.as_ref() { - let new_time = audio_element.current_time() + 15.0; - let _ = audio_element.set_current_time(new_time); - audio_dispatch_fwd - .reduce_mut(|state| state.update_current_time(new_time)); - } - }) - as Box); - - let _ = media_session.set_action_handler( - web_sys::MediaSessionAction::Seekforward, - Some(seek_forward_callback.as_ref().unchecked_ref()), - ); - seek_forward_callback.forget(); - } - } - } - - || () - }); - } - // Toggle playback let toggle_playback = { let dispatch = _audio_dispatch.clone(); @@ -1166,21 +1084,13 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { .find(|ep| ep.queueposition == Some(current_queue_position + 1)) { on_play_click( - next_episode.episodeurl.clone(), - next_episode.episodetitle.clone(), - next_episode.episodedescription.clone(), - next_episode.episodepubdate.clone(), - next_episode.episodeartwork.clone(), - next_episode.episodeduration, - next_episode.episodeid, - 
next_episode.listenduration, + next_episode.clone(), api_key.clone().unwrap().unwrap(), user_id.unwrap(), server_name.clone().unwrap(), audio_dispatch.clone(), audio_state.clone(), - None, - Some(next_episode.is_youtube.clone()), + false, ) .emit(MouseEvent::new("click").unwrap()); } else { @@ -1251,13 +1161,13 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { @@ -1406,10 +1316,10 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } }
@@ -1451,16 +1361,10 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html {
- { - html! { - <> - - - } - } + @@ -1488,7 +1392,7 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { on_shownotes_click.emit(e.clone()); // title_click_emit.emit(e); })} class="audio-top-button audio-full-button border-solid border selector-button font-bold py-2 px-4 mt-3 rounded-full flex items-center justify-center"> - { "Shownotes" } + { &i18n_shownotes } { if let Some(chapters) = &audio_state.episode_chapters { @@ -1497,7 +1401,7 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } } else { @@ -1512,7 +1416,7 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } else { html! { } } @@ -1593,12 +1497,13 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { let episode_id = props.episode_id; let show_modal = show_modal.clone(); let title_click = title_click.clone(); - + let props = props.clone(); Callback::from(move |e: MouseEvent| { show_modal.set(false); // Close modal before navigation title_click.emit(e); let dispatch_clone = dispatch.clone(); let history_clone = history.clone(); + let props = props.clone(); wasm_bindgen_futures::spawn_local(async move { dispatch_clone.reduce_mut(move |state| { // Only clear fetched_episode if we're navigating to a different episode @@ -1607,7 +1512,31 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } state.selected_episode_id = Some(episode_id); }); - history_clone.push("/episode"); + if episode_id != 0 { + history_clone.push(format!("/episode?episode_id={}", episode_id)); + } else { + let mut new_url = "/episode".to_string(); + new_url.push_str("?podcast_title="); + new_url.push_str(&urlencoding::encode(&props.title)); + new_url.push_str("&episode_url="); + new_url.push_str(&urlencoding::encode(&props.episode.episodeurl)); + new_url.push_str("&audio_url="); + new_url.push_str(&urlencoding::encode(&props.src)); + new_url.push_str("&is_youtube="); + new_url.push_str(&props.is_youtube.to_string()); + + history_clone.push(new_url); + + dispatch_clone.reduce_mut(move |state| { + 
state.selected_episode_id = Some(episode_id); + state.selected_episode_url = Some(props.episode.episodeurl.clone()); + state.selected_episode_audio_url = Some(props.src.clone()); + state.selected_podcast_title = Some(props.title.clone()); + state.person_episode = Some(false); + state.selected_is_youtube = props.episode.is_youtube; + state.fetched_episode = None; + }); + } }); }) }; @@ -1623,7 +1552,7 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { duration={audio_props.duration_sec as i32} on_close={on_modal_close.clone()} on_show_notes={nav_to_episode} - listen_duration_percentage={listen_duration_percentage} + listen_duration_percentage={listen_duration_percentage as i32} is_youtube={props.is_youtube} /> } @@ -1639,41 +1568,39 @@ pub fn audio_player(props: &AudioPlayerProps) -> Html { } pub fn on_play_pause( - episode_url_for_closure: String, - episode_title_for_closure: String, - episode_description_for_closure: String, - episode_release_date_for_closure: String, - episode_artwork_for_closure: String, - episode_duration_for_closure: i32, - episode_id_for_closure: i32, - listen_duration_for_closure: Option, + episode: Episode, api_key: String, user_id: i32, server_name: String, audio_dispatch: Dispatch, audio_state: Rc, - is_local: Option, - is_youtube_vid: Option, + app_state: Rc, ) -> Callback { + let is_local = if app_state.podcast_added.unwrap_or(false) && episode.episodeid != 0 { + app_state + .downloaded_episodes + .is_server_download(episode.episodeid) + || { + #[cfg(not(feature = "server_build"))] + { + app_state + .downloaded_episodes + .is_local_download(episode.episodeid) + } + #[cfg(feature = "server_build")] + { + false + } + } + } else { + false + }; + Callback::from(move |e: MouseEvent| { - let episode_url_for_play = episode_url_for_closure.clone(); - let episode_title_for_play = episode_title_for_closure.clone(); - let episode_description_for_play = episode_description_for_closure.clone(); - let episode_release_date_for_play = 
episode_release_date_for_closure.clone(); - let episode_artwork_for_play = episode_artwork_for_closure.clone(); - let episode_duration_for_play = episode_duration_for_closure.clone(); - let episode_id_for_play = episode_id_for_closure.clone(); - let server_play = server_name.clone(); - let api_play = api_key.clone(); - let audio_dis_play = audio_dispatch.clone(); - let audio_state_play = audio_state.clone(); - // Changed from '_' to 'e' let is_current = audio_state .currently_playing .as_ref() - .map_or(false, |current| { - current.episode_id == episode_id_for_closure - }); + .map_or(false, |current| current.episode_id == episode.episodeid); if is_current { audio_dispatch.reduce_mut(|state| { let currently_playing = state.audio_playing.unwrap_or(false); @@ -1687,102 +1614,71 @@ pub fn on_play_pause( } }); } else { + web_sys::console::log_1( + &format!( + "on_play_pause calling on_play_click with is_youtube_vid: {:?}", + episode.is_youtube + ) + .into(), + ); on_play_click( - episode_url_for_play, - episode_title_for_play, - episode_description_for_play, - episode_release_date_for_play, - episode_artwork_for_play, - episode_duration_for_play, - episode_id_for_play, - listen_duration_for_closure, - api_play, + episode.clone(), + api_key.clone(), user_id, - server_play, - audio_dis_play, - audio_state_play, + server_name.clone(), + audio_dispatch.clone(), + audio_state.clone(), is_local, - is_youtube_vid, ) - .emit(e); // Pass the event instead of '_' + .emit(e); } }) } pub fn on_play_click( - episode_url_for_closure: String, - episode_title_for_closure: String, - episode_description_for_closure: String, - episode_release_date_for_closure: String, - episode_artwork_for_closure: String, - episode_duration_for_closure: i32, - episode_id_for_closure: i32, - listen_duration_for_closure: Option, + mut episode: Episode, api_key: String, user_id: i32, server_name: String, audio_dispatch: Dispatch, _audio_state: Rc, - is_local: Option, - is_youtube_vid: Option, + is_local: 
bool, ) -> Callback { Callback::from(move |_: MouseEvent| { - let episode_url_for_closure = episode_url_for_closure.clone(); - let episode_title_for_closure = episode_title_for_closure.clone(); - let episode_description_for_closure = episode_description_for_closure.clone(); - let episode_release_date_for_closure = episode_release_date_for_closure.clone(); - let episode_artwork_for_closure = episode_artwork_for_closure.clone(); - let episode_duration_for_closure = episode_duration_for_closure.clone(); - let listen_duration_for_closure = listen_duration_for_closure.clone(); - let episode_id_for_closure = episode_id_for_closure.clone(); - let episode_is_youtube = is_youtube_vid.clone().unwrap(); let api_key = api_key.clone(); let user_id = user_id.clone(); let server_name = server_name.clone(); let audio_dispatch = audio_dispatch.clone(); let episode_pos: f32 = 0.0; - let episode_id = episode_id_for_closure.clone(); - - let call_ep_url = episode_url_for_closure.clone(); let check_server_name = server_name.clone(); let check_api_key = api_key.clone(); let check_user_id = user_id.clone(); - let episode_title_for_wasm = episode_title_for_closure.clone(); - let episode_description_for_wasm = episode_description_for_closure.clone(); - let episode_release_date_for_wasm = episode_release_date_for_closure.clone(); - let episode_url_for_wasm = call_ep_url.clone(); - let episode_artwork_for_wasm = episode_artwork_for_closure.clone(); - let episode_duration_for_wasm = episode_duration_for_closure.clone(); - let episode_id_for_wasm = episode_id_for_closure.clone(); let app_dispatch = audio_dispatch.clone(); - let episode_url = episode_url_for_wasm.clone(); - let episode_title = episode_title_for_wasm.clone(); - web_sys::console::log_1(&JsValue::from_str(&episode_id_for_wasm.to_string())); + let title = episode.episodetitle.clone(); + let url = episode.episodeurl.clone(); spawn_local(async move { // Check if the episode exists in the database (your existing code) let mut 
episode_exists = call_check_episode_in_db( &check_server_name.clone(), &check_api_key.clone(), check_user_id.clone(), - &episode_title.clone(), - &episode_url.clone(), + &title, + &url, ) .await .unwrap_or(false); - let mut episode_id = episode_id_for_wasm; - // If the episode exists but the current `episode_id` is `0`, retrieve the correct `episode_id` - if episode_exists && episode_id == 0 { + if episode_exists && episode.episodeid == 0 { match call_get_episode_id( &check_server_name, &check_api_key, &check_user_id, - &episode_title, - &episode_url, - episode_is_youtube, + &title, + &url, + episode.is_youtube, ) .await { @@ -1797,7 +1693,7 @@ pub fn on_play_click( "New episode ID: {}", new_episode_id ))); - episode_id = new_episode_id; + episode.episodeid = new_episode_id; } } Err(_) => { @@ -1810,7 +1706,7 @@ pub fn on_play_click( } web_sys::console::log_1(&JsValue::from_str(&format!( "post episode ID: {}", - episode_id + episode.episodeid ))); web_sys::console::log_1(&JsValue::from_str(&format!( @@ -1829,10 +1725,10 @@ pub fn on_play_click( let history_api_key = check_api_key.clone(); let history_add = HistoryAddRequest { - episode_id, + episode_id: episode.episodeid, episode_pos, user_id, - is_youtube: episode_is_youtube, + is_youtube: episode.is_youtube, }; let add_history_future = @@ -1851,9 +1747,9 @@ pub fn on_play_click( let queue_api_key = check_api_key.clone(); let request = QueuePodcastRequest { - episode_id, + episode_id: episode.episodeid, user_id, - is_youtube: episode_is_youtube, + is_youtube: episode.is_youtube, }; let queue_api = Option::from(queue_api_key); @@ -1890,18 +1786,18 @@ pub fn on_play_click( }); // Determine the source URL - let src = if episode_url_for_wasm.contains("youtube.com") { + let src = if episode.episodeurl.contains("youtube.com") { format!( "{}/api/data/stream/{}?api_key={}&user_id={}&type=youtube", - server_name, episode_id, api_key, user_id + server_name, episode.episodeid, api_key, user_id ) - } else if 
is_local.unwrap_or(false) { + } else if is_local { format!( "{}/api/data/stream/{}?api_key={}&user_id={}", - server_name, episode_id, api_key, user_id + server_name, episode.episodeid, api_key, user_id ) } else { - episode_url_for_wasm.clone() + episode.episodeurl.clone() }; // NEW CODE: Analyze the actual audio duration before playing @@ -1910,6 +1806,11 @@ pub fn on_play_click( let server_name_for_player = server_name.clone(); let api_key_for_player = api_key.clone(); + let title = episode.episodetitle.clone(); + let description = episode.episodedescription.clone(); + let pubdate = episode.episodepubdate.clone(); + let artworkurl = episode.episodeartwork.clone(); + let episode = episode.clone(); wasm_bindgen_futures::spawn_local(async move { // Function to get actual duration from audio file async fn get_actual_duration(audio_src: &str) -> Option { @@ -1978,22 +1879,36 @@ pub fn on_play_click( let actual_duration_sec = get_actual_duration(&src_for_analysis).await; // Use the actual duration if available, otherwise fall back to provided duration - let final_duration_sec = - actual_duration_sec.unwrap_or(episode_duration_for_wasm as f64); + let final_duration_sec = actual_duration_sec.unwrap_or(episode.episodeduration as f64); + + // Set actual duration in db + if (final_duration_sec as i32) != episode.episodeduration { + let req = UpdateEpisodeDurationRequest { + episode_id: episode.episodeid, + new_duration: final_duration_sec as i32, + is_youtube: episode.is_youtube, + }; + let _ = call_update_episode_duration( + &server_name_for_player, + &Some(api_key_for_player.clone()), + &req, + ) + .await; + } web_sys::console::log_1(&JsValue::from_str(&format!( "Original duration: {}s, Actual duration: {}s", - episode_duration_for_wasm, final_duration_sec + episode.episodeduration, final_duration_sec ))); // Continue with the rest of your existing code... 
- if episode_id != 0 { + if episode.episodeid != 0 { match call_get_podcast_id_from_ep( &server_name_for_player, &Some(api_key_for_player.clone()), - episode_id, + episode.episodeid, user_id, - Some(episode_is_youtube.clone()), + Some(episode.is_youtube), ) .await { @@ -2003,13 +1918,12 @@ pub fn on_play_click( &Some(api_key_for_player.clone()), user_id, podcast_id, - episode_is_youtube, + episode.is_youtube, ) .await { Ok((playback_speed, start_skip, end_skip)) => { - let start_pos_sec = - listen_duration_for_closure.unwrap_or(0).max(start_skip) as f64; + let start_pos_sec = episode.listenduration.max(start_skip) as f64; let end_pos_sec = end_skip as f64; audio_dispatch_for_duration.reduce_mut(move |audio_state| { @@ -2019,18 +1933,19 @@ pub fn on_play_click( audio_state.audio_volume = 100.0; audio_state.offline = Some(false); audio_state.currently_playing = Some(AudioPlayerProps { + episode: episode.clone(), src: src.clone(), - title: episode_title_for_wasm.clone(), - description: episode_description_for_wasm.clone(), - release_date: episode_release_date_for_wasm.clone(), - artwork_url: episode_artwork_for_wasm.clone(), + title: title, + description: description, + release_date: pubdate, + artwork_url: artworkurl, duration: format!("{}", final_duration_sec as i32), // Use actual duration - episode_id: episode_id_for_wasm.clone(), + episode_id: episode.episodeid, duration_sec: final_duration_sec, // Use actual duration start_pos_sec, end_pos_sec: end_pos_sec as f64, offline: false, - is_youtube: episode_is_youtube.clone(), + is_youtube: episode.is_youtube, }); audio_state.set_audio_source(src.to_string()); if let Some(audio) = &audio_state.audio_element { @@ -2062,18 +1977,19 @@ pub fn on_play_click( audio_state.audio_volume = 100.0; audio_state.offline = Some(false); audio_state.currently_playing = Some(AudioPlayerProps { + episode: episode.clone(), src: src.clone(), - title: episode_title_for_wasm.clone(), - description: episode_description_for_wasm.clone(), - 
release_date: episode_release_date_for_wasm.clone(), - artwork_url: episode_artwork_for_wasm.clone(), + title: title, + description: description, + release_date: pubdate, + artwork_url: artworkurl, duration: format!("{}", final_duration_sec as i32), // Use actual duration - episode_id: episode_id_for_wasm.clone(), + episode_id: episode.episodeid, duration_sec: final_duration_sec, // Use actual duration start_pos_sec: 0.0, end_pos_sec: 0.0, offline: false, - is_youtube: episode_is_youtube.clone(), + is_youtube: episode.is_youtube, }); audio_state.set_audio_source(src.to_string()); if let Some(audio) = &audio_state.audio_element { @@ -2088,7 +2004,7 @@ pub fn on_play_click( #[cfg(not(feature = "server_build"))] pub fn on_play_pause_offline( - episode_info: EpisodeDownload, + episode_info: Episode, audio_dispatch: Dispatch, audio_state: Rc, app_state: Dispatch, @@ -2126,12 +2042,12 @@ pub fn on_play_pause_offline( #[cfg(not(feature = "server_build"))] pub fn on_play_click_offline( - episode_info: EpisodeDownload, + episode: Episode, audio_dispatch: Dispatch, app_dispatch: Dispatch, ) -> Callback { Callback::from(move |_: MouseEvent| { - let episode_info_for_closure = episode_info.clone(); + let episode_info_for_closure = episode.clone(); let audio_dispatch = audio_dispatch.clone(); let app_dispatch = app_dispatch.clone(); @@ -2152,9 +2068,10 @@ pub fn on_play_click_offline( let episode_artwork_for_wasm = episode_info_for_closure.episodeartwork.clone(); let episode_duration_for_wasm = episode_info_for_closure.episodeduration.clone(); let episode_id_for_wasm = episode_info_for_closure.episodeid.clone(); - let listen_duration_for_closure = episode_info_for_closure.listenduration.clone(); - let episode_is_youtube_for_wasm = episode_info.is_youtube.clone(); + let listen_duration_for_closure = episode_info_for_closure.listenduration; + let episode_is_youtube_for_wasm = episode.is_youtube.clone(); + let episode = episode.clone(); wasm_bindgen_futures::spawn_local(async move 
{ match start_local_file_server(&file_path).await { Ok(server_url) => { @@ -2191,7 +2108,10 @@ pub fn on_play_click_offline( let onloadedmetadata = Closure::wrap(Box::new(move |_event: web_sys::Event| { let duration = src_audio.duration(); - if !duration.is_nan() && !duration.is_infinite() && duration > 0.0 { + if !duration.is_nan() + && !duration.is_infinite() + && duration > 0.0 + { resolve_clone .call1( &JsValue::UNDEFINED, @@ -2259,6 +2179,7 @@ pub fn on_play_click_offline( audio_state.audio_volume = 100.0; audio_state.offline = Some(true); audio_state.currently_playing = Some(AudioPlayerProps { + episode: episode.clone(), src: src.clone(), title: episode_title_for_wasm.clone(), description: episode_description_for_wasm.clone(), @@ -2267,14 +2188,14 @@ pub fn on_play_click_offline( duration: format!("{}", final_duration_sec as i32), // Use actual duration episode_id: episode_id_for_wasm.clone(), duration_sec: final_duration_sec, // Use actual duration - start_pos_sec: listen_duration_for_closure.unwrap_or(0) as f64, + start_pos_sec: listen_duration_for_closure as f64, end_pos_sec: 0.0, offline: true, is_youtube: episode_is_youtube_for_wasm, }); audio_state.set_audio_source(src.to_string()); if let Some(audio) = &audio_state.audio_element { - audio.set_current_time(listen_duration_for_closure.unwrap_or(0) as f64); + audio.set_current_time(listen_duration_for_closure as f64); let _ = audio.play(); } audio_state.audio_playing = Some(true); @@ -2290,7 +2211,9 @@ pub fn on_play_click_offline( }) } +#[allow(dead_code)] pub fn on_play_click_shared( + episode: Episode, episode_url: String, episode_title: String, episode_description: String, @@ -2315,7 +2238,7 @@ pub fn on_play_click_shared( // NEW: Analyze duration before playing let audio_dispatch_for_duration = audio_dispatch.clone(); let episode_url_for_analysis = episode_url.clone(); - + let episode = episode.clone(); wasm_bindgen_futures::spawn_local(async move { // Function to get actual duration from audio file 
async fn get_actual_duration(audio_src: &str) -> Option { @@ -2397,6 +2320,7 @@ pub fn on_play_click_shared( audio_state.audio_volume = 100.0; audio_state.offline = Some(false); audio_state.currently_playing = Some(AudioPlayerProps { + episode: episode.clone(), src: episode_url.clone(), title: episode_title.clone(), description: episode_description.clone(), diff --git a/web/src/components/click_events.rs b/web/src/components/click_events.rs index f02a9123..0d44ae1b 100644 --- a/web/src/components/click_events.rs +++ b/web/src/components/click_events.rs @@ -1,5 +1,5 @@ use crate::components::context::AppState; -use crate::components::podcast_layout::ClickedFeedURL; +use crate::pages::podcast_layout::ClickedFeedURL; use crate::requests::pod_req::{call_check_podcast, call_get_podcast_id}; use crate::requests::search_pods::{ call_get_podcast_episodes, call_get_youtube_episodes, call_parse_podcast_url, @@ -15,7 +15,8 @@ pub fn create_on_title_click( server_name: String, api_key: Option>, history: &BrowserHistory, - podcast_index_id: i64, + podcast_id: i32, + podcast_index_id: i32, podcast_title: String, podcast_url: String, podcast_description: String, @@ -51,7 +52,7 @@ pub fn create_on_title_click( }); let podcast_values = ClickedFeedURL { - podcastid: 0, + podcastid: podcast_id, podcastname: podcast_title.clone(), feedurl: podcast_url.clone(), description: podcast_description.clone(), @@ -111,7 +112,11 @@ pub fn create_on_title_click( }; match podcast_feed_results { - Ok(podcast_feed_results) => { + Ok(mut podcast_feed_results) => { + // Fix is_youtube field for all episodes based on the endpoint used + for episode in &mut podcast_feed_results.episodes { + episode.is_youtube = is_youtube; + } dispatch.reduce_mut(move |state| { state.podcast_added = Some(true); state.podcast_feed_results = Some(podcast_feed_results); diff --git a/web/src/components/context.rs b/web/src/components/context.rs index ce788fe8..aa46b2e8 100644 --- a/web/src/components/context.rs +++ 
b/web/src/components/context.rs @@ -1,17 +1,18 @@ use crate::components::audio::AudioPlayerProps; use crate::components::notification_center::TaskProgress; -use crate::components::podcast_layout::ClickedFeedURL; -use crate::components::podcasts::PodcastLayout; +use crate::pages::podcast_layout::ClickedFeedURL; +use crate::pages::podcasts::PodcastLayout; +use crate::requests::episode::Episode; use crate::requests::login_requests::AddUserRequest; use crate::requests::login_requests::GetUserDetails; use crate::requests::login_requests::LoginServerRequest; use crate::requests::login_requests::{GetApiDetails, TimeZoneInfo}; use crate::requests::pod_req::PodcastResponseExtra; + use crate::requests::pod_req::{ - Chapter, Episode, EpisodeDownloadResponse, EpisodeMetadataResponse, Funding, - HistoryDataResponse, HomeOverview, Person, Playlist, PlaylistInfo, Podcast, PodcastResponse, - PodrollItem, QueuedEpisodesResponse, RecentEps, RefreshProgress, SavedEpisodesResponse, - SharedEpisodeResponse, Transcript, Value, + Chapter, EpisodeDownloadResponse, Funding, HistoryDataResponse, HomeOverview, Person, Playlist, + PlaylistInfo, Podcast, PodcastResponse, PodrollItem, QueuedEpisodesResponse, RecentEps, + RefreshProgress, SavedEpisodesResponse, SharedEpisodeResponse, Transcript, Value, }; use crate::requests::search_pods::{ PeopleFeedResult, PodcastFeedResult, PodcastSearchResult, SearchResponse, YouTubeChannel, @@ -73,6 +74,7 @@ pub struct ExpandedDescriptions { } #[derive(Default, Clone, PartialEq, Store)] +#[allow(dead_code)] pub struct PlaylistState { pub include_unplayed: bool, pub include_partially_played: bool, @@ -102,11 +104,12 @@ pub struct AppState { pub people_feed_results: Option, pub server_feed_results: Option, pub queued_episodes: Option, - pub saved_episodes: Option, + #[serde(default)] + pub saved_episodes: Vec, pub episode_history: Option, - pub downloaded_episodes: Option, + #[serde(default)] + pub downloaded_episodes: DownloadedEpisodeRecords, pub 
search_episodes: Option, - pub episodes: Option, pub clicked_podcast_info: Option, pub pods: Option, pub podcast_feed_return: Option, @@ -117,14 +120,15 @@ pub struct AppState { #[serde(default)] pub expanded_descriptions: HashSet, pub selected_theme: Option, - pub fetched_episode: Option, + pub fetched_episode: Option, pub shared_fetched_episode: Option, pub selected_episode_id: Option, pub selected_episode_url: Option, pub selected_episode_audio_url: Option, pub selected_podcast_title: Option, pub person_episode: Option, - pub selected_is_youtube: Option, + #[serde(default)] + pub selected_is_youtube: bool, pub add_user_request: Option, pub time_zone_setup: Option, pub add_settings_user_reqeust: Option, @@ -137,10 +141,7 @@ pub struct AppState { pub date_format: Option, pub podcast_added: Option, pub completed_episodes: Option>, - pub saved_episode_ids: Option>, pub queued_episode_ids: Option>, - pub downloaded_episode_ids: Option>, - pub locally_downloaded_episodes: Option>, pub podcast_layout: Option, pub refresh_progress: Option, pub youtube_search_results: Option, @@ -155,6 +156,129 @@ pub struct AppState { pub active_tasks: Option>, } +impl AppState { + pub fn saved_episode_ids(&self) -> impl Iterator + '_ { + self.saved_episodes.iter().map(|e| e.episodeid) + } +} + +/// A collection of records for episodes downloaded either locally or on the server. +/// Mutating this collection does not affect the filesystem and episodes will need +/// to be downloaded or deleted to match changes made here. 
+#[derive(Default, Deserialize, Clone, PartialEq, Debug)] +pub struct DownloadedEpisodeRecords { + episodes: Vec, + local_ids: HashSet, + server_ids: HashSet, +} + +#[allow(dead_code)] +impl DownloadedEpisodeRecords { + /// Creates an iterator of all downloaded &Episode + pub fn episodes(&self) -> impl Iterator + '_ { + self.episodes.iter() + } + + /// Creates an unordered iterator over ids for episodes downloaded locally + pub fn local_ids(&self) -> impl Iterator + '_ { + self.local_ids.iter().map(|id| id.clone()) + } + + /// Creates an unordered iterator over ids for episodes downloaded to the server + pub fn server_ids(&self) -> impl Iterator + '_ { + self.server_ids.iter().map(|id| id.clone()) + } + + /// Checks if episode is downloaded to the server + pub fn is_server_download(&self, id: i32) -> bool { + self.server_ids.contains(&id) + } + + /// Checks if episode is downloaded to the server + pub fn is_local_download(&self, id: i32) -> bool { + self.local_ids.contains(&id) + } + + /// Checks if episode is downloaded to either the server or locally + pub fn is_download(&self, id: i32) -> bool { + return self.is_local_download(id) || self.is_server_download(id); + } + + /// Add a record of an Episode downloaded locally + pub fn push_local(&mut self, episode: Episode) { + let id = episode.episodeid; + // only add Episode if the id doesn't exist in either set + if !self.server_ids.contains(&episode.episodeid) + && !self.local_ids.contains(&episode.episodeid) + { + self.episodes.push(episode); + } + + self.local_ids.insert(id); + } + + /// Add a record of an Episode downloaded to the server + pub fn push_server(&mut self, episode: Episode) { + let id = episode.episodeid; + // only add Episode if the id doesn't exist in either set + if !self.server_ids.contains(&episode.episodeid) + && !self.local_ids.contains(&episode.episodeid) + { + self.episodes.push(episode); + } + + self.server_ids.insert(id); + } + + /// Remove the record of an Episode downloaded locally + 
pub fn remove_local(&mut self, id: i32) { + self.local_ids.remove(&id); + + // remove the ep if it isn't also downloaded on the server + if !self.server_ids.contains(&id) { + self.episodes.retain(|ep| ep.episodeid != id); + } + } + + /// Remove the record of an Episode downloaded on the server + pub fn remove_server(&mut self, id: i32) { + self.server_ids.remove(&id); + + // remove the ep if it isn't also downloaded locally + if !self.local_ids.contains(&id) { + self.episodes.retain(|ep| ep.episodeid != id); + } + } + + pub fn clear(&mut self) { + self.episodes.clear(); + self.server_ids.clear(); + self.local_ids.clear(); + } + + /// Remove all records of episodes downloaded locally + pub fn clear_local(&mut self) { + for id in self.local_ids.drain() { + if !self.server_ids.contains(&id) { + self.episodes.retain(|ep| ep.episodeid != id); + } + } + } + + /// Remove all records of episodes downloaded on the server + pub fn clear_server(&mut self) { + for id in self.local_ids.drain() { + if !self.server_ids.contains(&id) { + self.episodes.retain(|ep| ep.episodeid != id); + } + } + } + + pub fn len(&self) -> usize { + self.episodes.len() + } +} + #[derive(Default, Deserialize, Clone, PartialEq, Store, Debug)] pub struct UserStatsStore { pub stats: Option, @@ -267,6 +391,7 @@ impl AppState { } #[derive(Default, Clone, PartialEq, Store, Debug)] +#[allow(dead_code)] pub struct FilterState { pub selected_category: Option, pub category_filter_list: Option>, @@ -274,6 +399,7 @@ pub struct FilterState { // Add this alongside your other state structs #[derive(Default, Clone, PartialEq, Store)] +#[allow(dead_code)] pub struct PodcastState { pub added_podcast_urls: HashSet, } diff --git a/web/src/components/context_menu_button.rs b/web/src/components/context_menu_button.rs new file mode 100644 index 00000000..cdda22be --- /dev/null +++ b/web/src/components/context_menu_button.rs @@ -0,0 +1,1011 @@ +use crate::components::context::{AppState, UIState}; +#[cfg(not(feature = 
"server_build"))] +use crate::pages::downloads_tauri::{ + download_file, remove_episode_from_local_db, update_local_database, update_podcast_database, +}; +use crate::requests::episode::Episode; + +use crate::components::gen_funcs::format_error_message; +use crate::components::gen_funcs::format_time; +use crate::components::notification_center::{NotificationCenter, ToastNotification}; +use crate::components::safehtml::SafeHtml; +use crate::requests::pod_req::{ + call_download_episode, call_mark_episode_completed, call_mark_episode_uncompleted, + call_queue_episode, call_remove_downloaded_episode, call_remove_queued_episode, + call_remove_saved_episode, call_save_episode, DownloadEpisodeRequest, + MarkEpisodeCompletedRequest, QueuePodcastRequest, SavePodcastRequest, +}; +#[cfg(not(feature = "server_build"))] +use crate::requests::pod_req::{ + call_get_episode_metadata, call_get_podcast_details, EpisodeRequest, +}; +use crate::requests::search_pods::{ + call_get_podcast_info, call_youtube_search, test_connection, YouTubeSearchResults, +}; +use gloo_events::EventListener; +use gloo_timers::callback::Timeout; +use wasm_bindgen::closure::Closure; +use wasm_bindgen::JsCast; +use web_sys::HtmlElement; +use web_sys::{window, Element, HtmlInputElement, MouseEvent}; +use yew::prelude::*; +use yew::Callback; +use yew_router::history::{BrowserHistory, History}; +use yewdux::prelude::*; + +/// Specific page types for unique ctx menu implementations +#[derive(Clone, PartialEq)] +pub enum PageType { + Saved, + Queue, + Downloads, + LocalDownloads, + Default, +} + +#[derive(Properties, Clone, PartialEq)] +pub struct ContextButtonProps { + pub episode: Episode, + pub page_type: PageType, + #[prop_or(false)] + pub show_menu_only: bool, + #[prop_or(None)] + pub position: Option<(i32, i32)>, + #[prop_or(None)] + pub on_close: Option>, +} + +#[function_component(ContextMenuButton)] +pub fn context_button(props: &ContextButtonProps) -> Html { + let dropdown_open = use_state(|| false); + 
let (post_state, post_dispatch) = use_store::(); + let (_ui_state, _ui_dispatch) = use_store::(); + let api_key = post_state + .auth_details + .as_ref() + .map(|ud| ud.api_key.clone()); + let user_id = post_state.user_details.as_ref().map(|ud| ud.UserID.clone()); + let server_name = post_state + .auth_details + .as_ref() + .map(|ud| ud.server_name.clone()); + let dropdown_ref = use_node_ref(); + let button_ref = use_node_ref(); + + // Update dropdown_open if show_menu_only prop changes + { + let dropdown_open = dropdown_open.clone(); + use_effect_with(props.show_menu_only, move |show_menu_only| { + if *show_menu_only { + dropdown_open.set(true); + } + || () + }); + } + + let toggle_dropdown = { + let dropdown_open = dropdown_open.clone(); + Callback::from(move |e: MouseEvent| { + e.stop_propagation(); + dropdown_open.set(!*dropdown_open); + }) + }; + + // Close dropdown when clicking outside + { + let dropdown_open = dropdown_open.clone(); + let dropdown_ref = dropdown_ref.clone(); + let button_ref = button_ref.clone(); + let on_close = props.on_close.clone(); + let show_menu_only = props.show_menu_only; + + use_effect_with((*dropdown_open, ()), move |_| { + let document = window().unwrap().document().unwrap(); + let dropdown_open = dropdown_open.clone(); + let dropdown_ref = dropdown_ref.clone(); + let button_ref = button_ref.clone(); + let on_close = on_close.clone(); + let show_menu_only = show_menu_only; + + // Handle outside clicks/touches to dismiss menu + let handle_outside_interaction = { + let dropdown_open = dropdown_open.clone(); + let dropdown_ref = dropdown_ref.clone(); + let button_ref = button_ref.clone(); + let on_close = on_close.clone(); + + move |event: &web_sys::Event| { + if *dropdown_open { + if let Ok(target) = event.target().unwrap().dyn_into::() { + if let Some(dropdown_element) = dropdown_ref.cast::() { + // Check if click is outside dropdown + let outside_dropdown = !dropdown_element.contains(Some(&target)); + + // Check if click is 
outside button (only if button exists) + let outside_button = if let Some(button_element) = + button_ref.cast::() + { + !button_element.contains(Some(&target)) + } else { + // If no button exists (show_menu_only case), consider it as outside + true + }; + + if outside_dropdown && outside_button { + dropdown_open.set(false); + // If this is a long press menu (show_menu_only is true), + // call the on_close callback when clicked outside + if show_menu_only { + if let Some(on_close) = &on_close { + on_close.emit(()); + } + } + } + } + } + } + } + }; + + // Add click listener for desktop + let click_handler = handle_outside_interaction.clone(); + let click_listener = EventListener::new(&document, "click", move |event| { + click_handler(event); + }); + + // Add touchend listener for mobile (more reliable than touchstart for outside clicks) + let touch_handler = handle_outside_interaction.clone(); + let touch_listener = EventListener::new(&document, "touchend", move |event| { + touch_handler(event); + }); + + move || { + drop(click_listener); + drop(touch_listener); + } + }); + } + + let check_episode_id = props.episode.episodeid; + + let queue_api_key = api_key.clone(); + let queue_server_name = server_name.clone(); + let queue_post = post_dispatch.clone(); + // let server_name = server_name.clone(); + let on_add_to_queue = { + let episode = props.episode.clone(); + Callback::from(move |_| { + let server_name_copy = queue_server_name.clone(); + let api_key_copy = queue_api_key.clone(); + let queue_post = queue_post.clone(); + let episode_clone = episode.clone(); + let request = QueuePodcastRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + // let _ = call_queue_episode(&server_name.unwrap(), 
&api_key.flatten(), &request).await; + // queue_post.reduce_mut(|state| state.info_message = Option::from(format!("Episode added to Queue!"))); + match call_queue_episode(&server_name.unwrap(), &api_key.flatten(), &request).await + { + Ok(success_message) => { + queue_post.reduce_mut(|state| { + state.info_message = Option::from(format!("{}", success_message)); + if let Some(ref mut queued_episodes) = state.queued_episode_ids { + queued_episodes.push(episode_clone.episodeid); + } + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + queue_post.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let remove_queue_api_key = api_key.clone(); + let remove_queue_server_name = server_name.clone(); + let dispatch_clone = post_dispatch.clone(); + // let server_name = server_name.clone(); + let on_remove_queued_episode = { + let episode = props.episode.clone(); + let episode_id = props.episode.episodeid; + Callback::from(move |_: MouseEvent| { + let post_dispatch = dispatch_clone.clone(); + let server_name_copy = remove_queue_server_name.clone(); + let api_key_copy = remove_queue_api_key.clone(); + let request = QueuePodcastRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + match call_remove_queued_episode( + &server_name.unwrap(), + &api_key.flatten(), + &request, + ) + .await + { + Ok(success_message) => { + let formatted_info = format_error_message(&success_message.to_string()); + + // queue_post.reduce_mut(|state| state.info_message = Option::from(format!("{}", success_message))); 
+ post_dispatch.reduce_mut(|state| { + // Here, you should remove the episode from the queued_episodes + if let Some(ref mut queued_episodes) = state.queued_episodes { + queued_episodes + .episodes + .retain(|ep| ep.episodeid != episode_id); + } + if let Some(ref mut queued_episode_ids) = state.queued_episode_ids { + queued_episode_ids.retain(|&id| id != episode_id); + } + // Optionally, you can update the info_message with success message + state.info_message = Some(format!("{}", formatted_info).to_string()); + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_dispatch.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let is_queued = post_state + .queued_episode_ids + .as_ref() + .unwrap_or(&vec![]) + .contains(&check_episode_id.clone()); + + let on_toggle_queue = { + let on_add_to_queue = on_add_to_queue.clone(); + let on_remove_queued_episode = on_remove_queued_episode.clone(); + Callback::from(move |e: MouseEvent| { + if is_queued { + on_remove_queued_episode.emit(e); + } else { + on_add_to_queue.emit(()); + } + }) + }; + + let saved_api_key = api_key.clone(); + let saved_server_name = server_name.clone(); + let save_post = post_dispatch.clone(); + let on_save_episode = { + let episode = props.episode.clone(); + Callback::from(move |_| { + let server_name_copy = saved_server_name.clone(); + let api_key_copy = saved_api_key.clone(); + let post_state = save_post.clone(); + let episode_clone = episode.clone(); + let request = SavePodcastRequest { + episode_id: episode.episodeid, // changed from episode_title + user_id: user_id.unwrap(), + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let ep = 
episode.clone(); + let future = async move { + // let return_mes = call_save_episode(&server_name.unwrap(), &api_key.flatten(), &request).await; + // post_state.reduce_mut(|state| state.info_message = Option::from(format!("Episode saved successfully"))); + match call_save_episode(&server_name.unwrap(), &api_key.flatten(), &request).await { + Ok(success_message) => { + let formatted_info = format_error_message(&success_message.to_string()); + post_state.reduce_mut(|state| { + state.info_message = Option::from(format!("{}", formatted_info)); + if !state.saved_episode_ids().any(|id| id == episode.episodeid) { + state.saved_episodes.push(ep); + } + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_state.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let remove_saved_api_key = api_key.clone(); + let remove_saved_server_name = server_name.clone(); + let dispatch_clone = post_dispatch.clone(); + let on_remove_saved_episode = { + let episode = props.episode.clone(); + let episode_id = props.episode.episodeid; + Callback::from(move |_| { + let post_dispatch = dispatch_clone.clone(); + let server_name_copy = remove_saved_server_name.clone(); + let api_key_copy = remove_saved_api_key.clone(); + let request = SavePodcastRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + match call_remove_saved_episode(&server_name.unwrap(), &api_key.flatten(), &request) + .await + { + Ok(success_message) => { + let formatted_info = format_error_message(&success_message.to_string()); + + // queue_post.reduce_mut(|state| 
state.info_message = Option::from(format!("{}", success_message))); + post_dispatch.reduce_mut(|state| { + state + .saved_episodes + .retain(|e| e.episodeid != episode.episodeid); + state.info_message = Some(format!("{}", formatted_info).to_string()); + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_dispatch.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let is_saved = post_state + .saved_episodes + .iter() + .any(|e| e.episodeid == check_episode_id); + + let on_toggle_save = { + let on_save_episode = on_save_episode.clone(); + let on_remove_saved_episode = on_remove_saved_episode.clone(); + Callback::from(move |_| { + if is_saved { + on_remove_saved_episode.emit(()); + } else { + on_save_episode.emit(()); + } + }) + }; + + let download_api_key = api_key.clone(); + let download_server_name = server_name.clone(); + let download_post = post_dispatch.clone(); + let on_server_download_episode = { + let episode = props.episode.clone(); + Callback::from(move |_| { + let post_state = download_post.clone(); + let server_name_copy = download_server_name.clone(); + let api_key_copy = download_api_key.clone(); + let request = DownloadEpisodeRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let episode = episode.clone(); + let future = async move { + // let _ = call_download_episode(&server_name.unwrap(), &api_key.flatten(), &request).await; + // post_state.reduce_mut(|state| state.info_message = Option::from(format!("Episode now downloading!"))); + match call_download_episode(&server_name.unwrap(), 
&api_key.flatten(), &request) + .await + { + Ok(success_message) => { + post_state.reduce_mut(|state| { + state.info_message = Option::from(format!("{}", success_message)); + state.downloaded_episodes.push_server(episode); + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_state.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let remove_download_api_key = api_key.clone(); + let remove_download_server_name = server_name.clone(); + let dispatch_clone = post_dispatch.clone(); + let on_remove_downloaded_episode = { + let episode = props.episode.clone(); + let episode_id = props.episode.episodeid; + Callback::from(move |_| { + let post_dispatch = dispatch_clone.clone(); + let server_name_copy = remove_download_server_name.clone(); + let api_key_copy = remove_download_api_key.clone(); + let request = DownloadEpisodeRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + // let _ = call_download_episode(&server_name.unwrap(), &api_key.flatten(), &request).await; + // post_state.reduce_mut(|state| state.info_message = Option::from(format!("Episode now downloading!"))); + match call_remove_downloaded_episode( + &server_name.unwrap(), + &api_key.flatten(), + &request, + ) + .await + { + Ok(success_message) => { + let formatted_info = format_error_message(&success_message.to_string()); + + post_dispatch.reduce_mut(|state| { + state.downloaded_episodes.remove_local(episode.episodeid); + state.info_message = Some(format!("{}", formatted_info).to_string()); + }); + } + Err(e) => { + 
let formatted_error = format_error_message(&e.to_string()); + post_dispatch.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let is_downloaded = post_state + .downloaded_episodes + .is_server_download(check_episode_id); + + #[cfg(not(feature = "server_build"))] + let is_locally_downloaded = post_state + .downloaded_episodes + .is_local_download(check_episode_id); + + let on_toggle_download = { + let on_download = on_server_download_episode.clone(); + let on_remove_download = on_remove_downloaded_episode.clone(); + Callback::from(move |_| { + if is_downloaded { + on_remove_download.emit(()); + } else { + on_download.emit(()); + } + }) + }; + + #[cfg(not(feature = "server_build"))] + let on_local_episode_download = { + let episode = props.episode.clone(); + let download_local_post = post_dispatch.clone(); + let server_name_copy = server_name.clone(); + let api_key_copy = api_key.clone(); + let user_id_copy = user_id.clone(); + + Callback::from(move |_| { + let post_state = download_local_post.clone(); + let episode_id = episode.episodeid; + let request = EpisodeRequest { + episode_id, + user_id: user_id_copy.unwrap(), + person_episode: false, + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy.clone().unwrap(); + let ep_api_key = api_key_copy.clone().flatten(); + let api_key = api_key_copy.clone().flatten(); + + let episode = episode.clone(); + let future = async move { + match call_get_episode_metadata(&server_name, ep_api_key, &request).await { + Ok(episode_info) => { + let audio_url = episode_info.episodeurl.clone(); + let artwork_url = episode_info.episodeartwork.clone(); + let podcast_id = episode_info.podcastid.clone(); + let filename = format!("episode_{}.mp3", episode_id); + let artwork_filename = format!("artwork_{}.jpg", episode_id); + 
post_state.reduce_mut(|state| { + state.info_message = Some(format!("Episode download queued!")); + + // Add to locally downloaded episodes list + state.downloaded_episodes.push_local(episode); + }); + // Download audio + match download_file(audio_url, filename.clone()).await { + Ok(_) => {} + Err(e) => { + post_state.reduce_mut(|state| { + let formatted_error = format_error_message(&format!("{:?}", e)); + state.error_message = Some(format!( + "Failed to download episode audio: {}", + formatted_error.clone() + )) + }); + web_sys::console::log_1(&format!("audio fail: {:?}", e).into()); + } + } + + // Download artwork + if let Err(e) = download_file(artwork_url, artwork_filename.clone()).await { + post_state.reduce_mut(|state| { + let formatted_error = format_error_message(&format!("{:?}", e)); + state.error_message = Some(format!( + "Failed to download episode artwork: {}", + formatted_error.clone() + )) + }); + web_sys::console::log_1(&format!("art fail: {:?}", e).into()); + } + + // Update local JSON database + if let Err(e) = update_local_database(episode_info.clone()).await { + post_state.reduce_mut(|state| { + let formatted_error = format_error_message(&format!("{:?}", e)); + state.error_message = Some(format!( + "Failed to update local database: {}", + formatted_error.clone() + )) + }); + web_sys::console::log_1( + &format!("Unable to parse Podcasts: {:?}", e).into(), + ); + } + + // Fetch and update local podcast metadata + match call_get_podcast_details( + &server_name, + &api_key.unwrap(), + user_id_copy.unwrap(), + podcast_id, + ) + .await + { + Ok(podcast_details) => { + if let Err(e) = update_podcast_database(podcast_details).await { + post_state.reduce_mut(|state| { + let formatted_error = + format_error_message(&format!("{:?}", e)); + state.error_message = Some(format!( + "Failed to update podcast database: {}", + formatted_error + )) + }); + } + } + Err(e) => { + post_state.reduce_mut(|state| { + let formatted_error = 
format_error_message(&e.to_string()); + state.error_message = Some(format!( + "Failed to fetch podcast metadata: {:?}", + formatted_error + )) + }); + } + } + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_state.reduce_mut(|state| { + state.error_message = Some(format!("s {:?}", formatted_error)) + }); + } + } + }; + + wasm_bindgen_futures::spawn_local(future); + }) + }; + + #[cfg(not(feature = "server_build"))] + let ui_dispatch = _ui_dispatch.clone(); + + #[cfg(not(feature = "server_build"))] + let on_remove_locally_downloaded_episode = { + let episode = props.episode.clone(); + let download_ui_dispatch = ui_dispatch.clone(); + let download_local_post = post_dispatch.clone(); + + Callback::from(move |_: MouseEvent| { + let post_state = download_local_post.clone(); + let ui_state = download_ui_dispatch.clone(); + let episode_id = episode.episodeid; + + let future = async move { + let filename = format!("episode_{}.mp3", episode_id); + + // Download audio + match remove_episode_from_local_db(episode_id).await { + Ok(_) => { + // Update info_message and remove from locally_downloaded_episodes + post_state.reduce_mut(|state| { + state.info_message = + Some(format!("Local episode {} deleted!", filename)); + + // Remove from locally downloaded episodes list + state.downloaded_episodes.remove_local(episode_id); + }); + + // Update local_download_increment in ui_state + ui_state.reduce_mut(|state| { + if let Some(increment) = state.local_download_increment.as_mut() { + *increment += 1; + } else { + state.local_download_increment = Some(1); + } + }); + } + Err(e) => { + let formatted_error = format_error_message(&format!("{:?}", e)); + post_state.reduce_mut(|state| { + state.error_message = Some(format!( + "Failed to download episode audio: {}", + formatted_error + )) + }); + } + } + }; + + wasm_bindgen_futures::spawn_local(future); + }) + }; + + let uncomplete_api_key = api_key.clone(); + let uncomplete_server_name = 
server_name.clone(); + let uncomplete_dispatch_clone = post_dispatch.clone(); + let on_uncomplete_episode = { + let episode = props.episode.clone(); + let episode_id = props.episode.episodeid; + Callback::from(move |_| { + let post_dispatch = uncomplete_dispatch_clone.clone(); + let server_name_copy = uncomplete_server_name.clone(); + let api_key_copy = uncomplete_api_key.clone(); + let request = MarkEpisodeCompletedRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + // let _ = call_download_episode(&server_name.unwrap(), &api_key.flatten(), &request).await; + // post_state.reduce_mut(|state| state.info_message = Option::from(format!("Episode now downloading!"))); + match call_mark_episode_uncompleted( + &server_name.unwrap(), + &api_key.flatten(), + &request, + ) + .await + { + Ok(success_message) => { + // queue_post.reduce_mut(|state| state.info_message = Option::from(format!("{}", success_message))); + post_dispatch.reduce_mut(|state| { + if let Some(completed_episodes) = state.completed_episodes.as_mut() { + if let Some(pos) = + completed_episodes.iter().position(|&id| id == episode_id) + { + completed_episodes.remove(pos); + } else { + completed_episodes.push(episode_id); + } + } else { + state.completed_episodes = Some(vec![episode_id]); + } + state.info_message = Some(format!("{}", success_message)); + }); + } + Err(e) => { + post_dispatch.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", e)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let complete_api_key = api_key.clone(); + let complete_server_name = server_name.clone(); + let dispatch_clone = 
post_dispatch.clone(); + let on_complete_episode = { + let episode = props.episode.clone(); + let episode_id = props.episode.episodeid; + Callback::from(move |_| { + let post_dispatch = dispatch_clone.clone(); + let server_name_copy = complete_server_name.clone(); + let api_key_copy = complete_api_key.clone(); + let request = MarkEpisodeCompletedRequest { + episode_id: episode.episodeid, + user_id: user_id.unwrap(), // replace with the actual user ID + is_youtube: episode.is_youtube, + }; + let server_name = server_name_copy; // replace with the actual server name + let api_key = api_key_copy; // replace with the actual API key + let future = async move { + // let _ = call_download_episode(&server_name.unwrap(), &api_key.flatten(), &request).await; + // post_state.reduce_mut(|state| state.info_message = Option::from(format!("Episode now downloading!"))); + match call_mark_episode_completed( + &server_name.unwrap(), + &api_key.flatten(), + &request, + ) + .await + { + Ok(success_message) => { + // queue_post.reduce_mut(|state| state.info_message = Option::from(format!("{}", success_message))); + post_dispatch.reduce_mut(|state| { + if let Some(completed_episodes) = state.completed_episodes.as_mut() { + if let Some(pos) = + completed_episodes.iter().position(|&id| id == episode_id) + { + completed_episodes.remove(pos); + } else { + completed_episodes.push(episode_id); + } + } else { + state.completed_episodes = Some(vec![episode_id]); + } + state.info_message = Some(format!("{}", success_message)); + }); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + post_dispatch.reduce_mut(|state| { + state.error_message = Option::from(format!("{}", formatted_error)) + }); + // Handle error, e.g., display the error message + } + } + }; + wasm_bindgen_futures::spawn_local(future); + // dropdown_open.set(false); + }) + }; + + let is_completed = post_state + .completed_episodes + .as_ref() + .unwrap_or(&vec![]) + .contains(&check_episode_id); + + let 
on_toggle_complete = { + let on_complete_episode = on_complete_episode.clone(); + let on_uncomplete_episode = on_uncomplete_episode.clone(); + let is_completed = is_completed.clone(); + + Callback::from(move |_| { + if is_completed { + on_uncomplete_episode.emit(()); + } else { + on_complete_episode.emit(()); + } + }) + }; + + let close_dropdown = { + let dropdown_open = dropdown_open.clone(); + let on_close = props.on_close.clone(); + let show_menu_only = props.show_menu_only; + + Callback::from(move |_| { + dropdown_open.set(false); + + // If this is a long press menu, also call the on_close callback + if show_menu_only { + if let Some(on_close) = &on_close { + on_close.emit(()); + } + } + }) + }; + + let wrap_action = |action: Callback| { + let close = close_dropdown.clone(); + Callback::from(move |e: MouseEvent| { + action.emit(e); + close.emit(()); + }) + }; + + #[cfg(feature = "server_build")] + let download_button = html! { + + }; + + #[cfg(not(feature = "server_build"))] + let download_button = html! { + <> + + { + if is_locally_downloaded { + html! { + + } + } else { + html! { + + } + } + } + + }; + + #[cfg(not(feature = "server_build"))] + let local_download_options = html! { + <> + + + + { + if is_locally_downloaded { + html! { + + } + } else { + html! { + + } + } + } + + + }; + + #[cfg(feature = "server_build")] + let local_download_options = html! {}; + let action_buttons = match props.page_type { + PageType::Saved => html! { + <> + + + { + download_button.clone() + } + + + }, + PageType::Queue => html! { + <> + + + { + download_button.clone() + } + + + }, + PageType::Downloads => html! { + <> + + + { + download_button.clone() + } + + + }, + PageType::LocalDownloads => html! { + local_download_options + }, + PageType::Default => html! { + <> + + + { + download_button.clone() + } + + + }, + }; + + html! { +
+ if !props.show_menu_only { + + } + if *dropdown_open { + + } +
+ } +} diff --git a/web/src/components/episode_list_item.rs b/web/src/components/episode_list_item.rs new file mode 100644 index 00000000..8bbbd767 --- /dev/null +++ b/web/src/components/episode_list_item.rs @@ -0,0 +1,594 @@ +use crate::components::audio::on_play_click; +use crate::components::context::{AppState, ExpandedDescriptions, UIState}; +use crate::components::gen_components::{on_shownotes_click, EpisodeModal, FallbackImage}; + +use crate::components::context_menu_button::{ContextMenuButton, PageType}; +use crate::components::gen_funcs::{ + format_datetime, match_date_format, parse_date, sanitize_html_with_blank_target, use_long_press, +}; +use crate::components::gen_funcs::{format_time, strip_images_from_html}; +use crate::components::safehtml::SafeHtml; +use crate::components::virtual_list::DragCallbacks; +use crate::requests::episode::Episode; +use gloo::history::BrowserHistory; +use gloo_events::EventListener; +use wasm_bindgen::prelude::*; +use web_sys::{window, MouseEvent}; +use yew::prelude::*; +use yew::Callback; +use yewdux::prelude::*; + +#[allow(dead_code)] +#[derive(Properties, PartialEq, Clone)] +pub struct EpisodeListItemProps { + pub episode: Episode, + // pub _is_expanded: bool, + // pub toggle_expanded: Callback, + #[prop_or(PageType::Default)] + pub page_type: PageType, + #[prop_or_default] + pub on_checkbox_change: Callback, + #[prop_or_default] + pub drag_callbacks: DragCallbacks, + #[prop_or_default] + pub is_delete_mode: bool, +} + +#[function_component(EpisodeListItem)] +pub fn episode_list_item(props: &EpisodeListItemProps) -> Html { + // Use selective subscriptions to only re-render when relevant state changes + let episode_id = props.episode.episodeid; + + // Only subscribe to the specific fields we need for RENDERING + let is_completed = use_selector(move |state: &AppState| { + state.completed_episodes + .as_ref() + .unwrap_or(&vec![]) + .contains(&episode_id) + }); + let auth_details = use_selector(|state: &AppState| 
state.auth_details.clone()); + let user_details = use_selector(|state: &AppState| state.user_details.clone()); + let date_format = use_selector(|state: &AppState| state.date_format.clone()); + let user_tz = use_selector(|state: &AppState| state.user_tz.clone()); + let hour_preference = use_selector(|state: &AppState| state.hour_preference); + let selected_for_deletion = use_selector(move |state: &AppState| { + state.selected_episodes_for_deletion.contains(&episode_id) + }); + let podcast_added = use_selector(|state: &AppState| state.podcast_added); + let is_downloaded_server = use_selector(move |state: &AppState| { + state.downloaded_episodes.is_server_download(episode_id) + }); + let is_downloaded_local = use_selector(move |state: &AppState| { + state.downloaded_episodes.is_local_download(episode_id) + }); + + // We still need the dispatcher for actions + let (_app_state, app_dispatch) = use_store::(); + + let (audio_state, audio_dispatch) = use_store::(); + let (desc_state, desc_dispatch) = use_store::(); + + // DEBUG: Log re-renders to confirm the fix works + web_sys::console::log_1(&format!("EpisodeListItem render: {}", props.episode.episodetitle).into()); + + /* + Item Shape + */ + let container_height: UseStateHandle = use_state(|| "221px".to_string()); // Should be em? 
+ + let is_narrow_viewport = { + let window = web_sys::window().expect("no global window exists"); + window.inner_width().unwrap().as_f64().unwrap() < 500.0 + }; + + // resize evt listener + { + let container_height = container_height.clone(); + use_effect_with((), move |_| { + let update_height = { + let container_height = container_height.clone(); + Callback::from(move |_| { + if let Some(window) = window() { + if let Ok(width) = window.inner_width() { + if let Some(width) = width.as_f64() { + let new_height = if width <= 530.0 { + "122px" + } else if width <= 768.0 { + "150px" + } else { + "221px" + }; + container_height.set(new_height.to_string()); + } + } + } + }) + }; + + update_height.emit(()); + + let listener = EventListener::new(&window().unwrap(), "resize", move |_| { + update_height.emit(()); + }); + + move || drop(listener) + }); + } + + // let desc_expanded = desc_state + // .expanded_descriptions + // .contains(&props.episode.episodeid.to_string()); + + // #[wasm_bindgen] + // extern "C" { + // #[wasm_bindgen(js_namespace = window)] + // fn toggleDescription(guid: &str, expanded: bool); + // } + // let toggle_expanded = { + // let desc_dispatch = desc_dispatch.clone(); + // let episode_guid = props.episode.episodeid.clone().to_string(); + + // Callback::from(move |_: MouseEvent| { + // let guid = episode_guid.clone(); + // desc_dispatch.reduce_mut(move |state| { + // if state.expanded_descriptions.contains(&guid) { + // state.expanded_descriptions.remove(&guid); + // toggleDescription(&guid, false); + // } else { + // state.expanded_descriptions.insert(guid.clone()); + // toggleDescription(&guid, true); + // } + // }); + // }) + // }; + + /* + Modal interactions + */ + let show_modal = use_state(|| false); + + let on_modal_open = { + let show_modal = show_modal.clone(); + Callback::from(move |_: i32| show_modal.set(true)) + }; + + let on_modal_close: Callback = { + let show_modal = show_modal.clone(); + Callback::from(move |_: MouseEvent| 
show_modal.set(false)) + }; + + /* + Audio Player + */ + let is_current_episode = audio_state + .currently_playing + .as_ref() + .map_or(false, |current| { + current.episode_id == props.episode.episodeid + }); + + let is_playing = audio_state.audio_playing.unwrap_or(false); + + let formatted_pub_date = { + let date_format = match_date_format(date_format.as_deref()); + let datetime = parse_date(&props.episode.episodepubdate, &user_tz); + format_datetime(&datetime, &hour_preference, date_format) + }; + + let api_key = auth_details + .as_ref() + .as_ref() + .map(|ud| ud.api_key.clone().unwrap()) + .unwrap(); + let user_id = user_details + .as_ref() + .as_ref() + .map(|ud| ud.UserID.clone()) + .unwrap(); + let server_name = auth_details + .as_ref() + .as_ref() + .map(|ud| ud.server_name.clone()) + .unwrap(); + + // Compute is_local inline instead of passing it via app_state + let is_local = if podcast_added.unwrap_or(false) && props.episode.episodeid != 0 { + *is_downloaded_server || { + #[cfg(not(feature = "server_build"))] + { + *is_downloaded_local + } + #[cfg(feature = "server_build")] + { + false + } + } + } else { + false + }; + + // Inline on_play_pause logic to avoid needing app_state + let on_play_pause = { + let episode = props.episode.clone(); + let api_key = api_key.clone(); + let user_id = user_id.clone(); + let server_name = server_name.clone(); + let audio_dispatch = audio_dispatch.clone(); + let audio_state = audio_state.clone(); + let is_local = is_local; + + Callback::from(move |e: MouseEvent| { + let is_current = audio_state + .currently_playing + .as_ref() + .map_or(false, |current| current.episode_id == episode.episodeid); + if is_current { + audio_dispatch.reduce_mut(|state| { + let currently_playing = state.audio_playing.unwrap_or(false); + state.audio_playing = Some(!currently_playing); + if let Some(audio) = &state.audio_element { + if currently_playing { + let _ = audio.pause(); + } else { + let _ = audio.play(); + } + } + }); + } else { + 
on_play_click( + episode.clone(), + api_key.clone(), + user_id, + server_name.clone(), + audio_dispatch.clone(), + audio_state.clone(), + is_local, + ) + .emit(e); + } + }) + }; + + /* + Episode Information + */ + + let episode_description = sanitize_html_with_blank_target(&props.episode.episodedescription); + + let (_listen_duration_str, listen_duration_percentage) = { + let lds = format_time(props.episode.listenduration); + let ldp = if props.episode.listenduration > 0 { + ((props.episode.listenduration * 100) / props.episode.episodeduration).min(100) + } else { + 0 + }; + (lds, ldp) + }; + + let episode_duration_str = format_time(props.episode.episodeduration); + + // is_completed is already defined via use_selector above + + let checkbox_ep = props.episode.episodeid; + + // Handle context menu position + // let context_menu_style = if props.show_context_menu { + // format!( + // "position: fixed; top: {}px; left: {}px; z-index: 1000;", + // props.context_menu_position.1, props.context_menu_position.0 + // ) + // } else { + // String::new() + // }; + + /* + Set up Context Menu + */ + let show_context_menu = use_state(|| false); + let context_menu_position = use_state(|| (0, 0)); + + // Long press handler - simulate clicking the context button + let context_button_ref = use_node_ref(); + let on_long_press = { + let context_button_ref = context_button_ref.clone(); + let show_context_menu = show_context_menu.clone(); + let context_menu_position = context_menu_position.clone(); + + Callback::from(move |event: TouchEvent| { + if let Some(touch) = event.touches().get(0) { + event.prevent_default(); + // Record position for the context menu + context_menu_position.set((touch.client_x(), touch.client_y())); + + // Find and click the context button (if it exists) + if let Some(button) = context_button_ref.cast::() { + button.click(); + } else { + // If the button doesn't exist (maybe on mobile where it's hidden) + // we'll just set our state to show the menu + 
show_context_menu.set(true); + } + } + }) + }; + + // Close context menu callback + let close_context_menu: Callback<()> = { + let show_context_menu = show_context_menu.clone(); + Callback::from(move |_| { + show_context_menu.set(false); + }) + }; + + // Setup long press detection + let (on_touch_start, on_touch_end, on_touch_move, is_long_press_state, is_pressing_state) = + use_long_press(on_long_press, Some(600)); // 600ms for long press + + // When long press is detected through the hook, update our state + { + let show_context_menu = show_context_menu.clone(); + use_effect_with(is_long_press_state, move |is_pressed| { + if **is_pressed { + show_context_menu.set(true); + } + || () + }); + } + + /* + Show-Notes + */ + let browser_history = BrowserHistory::new(); + let on_shownotes_click = { + let is_local_for_shownotes = *is_downloaded_local; + let src = if props.episode.episodeurl.contains("youtube.com") { + format!( + "{}/api/data/stream/{}?api_key={}&user_id={}&type=youtube", + server_name, props.episode.episodeid, api_key, user_id + ) + } else if is_local_for_shownotes { + format!( + "{}/api/data/stream/{}?api_key={}&user_id={}", + server_name, props.episode.episodeid, api_key, user_id + ) + } else { + props.episode.episodeurl.clone() + }; + + on_shownotes_click( + browser_history.clone(), + app_dispatch.clone(), + props.episode.episodeid.clone(), + props.episode.feedurl.clone(), + src, + props.episode.episodetitle.clone(), + true, + false, + props.episode.is_youtube, + ) + }; + + #[wasm_bindgen] + extern "C" { + #[wasm_bindgen(js_namespace = window)] + fn toggleDescription(guid: &str, expanded: bool); + } + + html! { +
+
+ + { + if props.drag_callbacks.draggable() + { + html!{ +
+ +
+ } + } else { + html!{ } + } + } + + { + if props.is_delete_mode { + html! { +
+ +
+ } + } else { + html! {} + } + } + +
+ +
+
+
+

+ {props.episode.episodetitle.clone()} +

+ { + if *is_completed { + html! { + + } + } else { + html! {} + } + } +
+ + { + html! { + + } + } + +
+ + + { formatted_pub_date.clone() } + +
+ { + if *is_completed { + if is_narrow_viewport { + html! { +
+ {"Completed"} +
+ } + } else { + html! { +
+ { episode_duration_str } + { "- Completed" } +
+ } + } + } else { + if props.episode.listenduration > 0 { + html! { +
+ { + if !is_narrow_viewport { + html! { + { format_time(props.episode.listenduration) } + } + } else { + html! {} + } + } +
+
+
+ { episode_duration_str } +
+ } + } else { + html! { + { episode_duration_str } + } + } + } + } +
+ { + html! { +
+ // only show links if there is a url to link to + if !props.episode.episodeurl.is_empty() { + + + + } +
+ } + } +
+ + // This shows the context menu via long press + { + if *show_context_menu { + html! { + + } + } else { + html! {} + } + } + + if *show_modal { + + } +
+ } +} diff --git a/web/src/components/episodes_layout.rs b/web/src/components/episodes_layout.rs deleted file mode 100644 index ef9af8de..00000000 --- a/web/src/components/episodes_layout.rs +++ /dev/null @@ -1,2984 +0,0 @@ -use super::app_drawer::App_drawer; -use super::gen_components::{FallbackImage, Search_nav, UseScrollToTop}; -use crate::components::audio::AudioPlayer; -use crate::components::click_events::create_on_title_click; -use crate::components::context::{AppState, UIState}; -use crate::components::gen_funcs::{ - format_error_message, get_filter_preference, set_filter_preference, get_default_sort_direction, -}; -use crate::components::host_component::HostDropdown; -use crate::components::podcast_layout::ClickedFeedURL; -use crate::components::virtual_list::PodcastEpisodeVirtualList; -use crate::requests::pod_req::{ - call_add_category, call_add_podcast, call_adjust_skip_times, call_bulk_download_episodes, - call_bulk_mark_episodes_completed, call_bulk_queue_episodes, call_bulk_save_episodes, - call_check_podcast, call_clear_playback_speed, call_download_all_podcast, - call_enable_auto_download, call_fetch_podcasting_2_pod_data, call_get_auto_download_status, - call_get_feed_cutoff_days, call_get_play_episode_details, call_get_podcast_id_from_ep, - call_get_podcast_id_from_ep_name, call_get_podcast_notifications_status, call_get_rss_key, - call_remove_category, call_remove_podcasts_name, call_remove_youtube_channel, - call_set_playback_speed, call_toggle_podcast_notifications, call_update_feed_cutoff_days, - AddCategoryRequest, AutoDownloadRequest, BulkEpisodeActionRequest, - ClearPlaybackSpeedRequest, DownloadAllPodcastRequest, FetchPodcasting2PodDataRequest, - PlaybackSpeedRequest, PodcastValues, RemoveCategoryRequest, RemovePodcastValuesName, - RemoveYouTubeChannelValues, SkipTimesRequest, UpdateFeedCutoffDaysRequest, -}; -use crate::requests::search_pods::call_get_podcast_details_dynamic; -use crate::requests::search_pods::call_get_podcast_episodes; 
-use htmlentity::entity::decode; -use htmlentity::entity::ICodedDataTrait; -use std::collections::{HashMap, HashSet}; -use std::rc::Rc; -use wasm_bindgen::closure::Closure; -use wasm_bindgen::prelude::*; -use wasm_bindgen::JsCast; -use wasm_bindgen_futures::spawn_local; -use web_sys::Element; -use web_sys::{window, Event, HtmlInputElement, MouseEvent, UrlSearchParams}; -use yew::prelude::*; -use yew::Properties; -use yew::{function_component, html, use_effect_with, use_node_ref, Callback, Html, TargetCast}; -use yew_router::history::{BrowserHistory, History}; -use yewdux::prelude::*; - -fn add_icon() -> Html { - html! { - - } -} - -fn payments_icon() -> Html { - html! { - - } -} - -fn rss_icon() -> Html { - html! { - - } -} - -fn website_icon() -> Html { - html! { - - } -} - -fn trash_icon() -> Html { - html! { - - - } -} -fn settings_icon() -> Html { - html! { - - - } -} -fn download_icon() -> Html { - html! { - - - } -} -fn no_icon() -> Html { - html! {} -} - -#[allow(dead_code)] -fn play_icon() -> Html { - html! { - - } -} - -#[allow(dead_code)] -fn pause_icon() -> Html { - html! 
{ - - } -} - -#[derive(Properties, PartialEq)] -pub struct Props { - pub html: String, -} - -fn sanitize_html(html: &str) -> String { - let cleaned_html = ammonia::clean(html); - let decoded_data = decode(cleaned_html.as_bytes()); - match decoded_data.to_string() { - Ok(decoded_html) => decoded_html, - Err(_) => String::from("Invalid HTML content"), - } -} - -fn get_rss_base_url() -> String { - let window = window().expect("no global `window` exists"); - let location = window.location(); - let current_url = location - .href() - .unwrap_or_else(|_| "Unable to retrieve URL".to_string()); - - if let Some(storage) = window.local_storage().ok().flatten() { - if let Ok(Some(auth_state)) = storage.get_item("userAuthState") { - if let Ok(json) = serde_json::from_str::(&auth_state) { - if let Some(server_name) = json - .get("auth_details") - .and_then(|auth| auth.get("server_name")) - .and_then(|name| name.as_str()) - { - return format!("{}/rss", server_name); - } - } - } - } - // Fallback to using the current URL's origin - format!( - "{}/rss", - current_url.split('/').take(3).collect::>().join("/") - ) -} - -pub enum AppStateMsg { - ExpandEpisode(String), - CollapseEpisode(String), -} - -impl Reducer for AppStateMsg { - fn apply(self, mut state: Rc) -> Rc { - let state_mut = Rc::make_mut(&mut state); - - match self { - AppStateMsg::ExpandEpisode(guid) => { - state_mut.expanded_descriptions.insert(guid); - } - AppStateMsg::CollapseEpisode(guid) => { - state_mut.expanded_descriptions.remove(&guid); - } - } - - // Return the Rc itself, not a reference to it - state - } -} - -#[derive(Clone, PartialEq)] -pub enum EpisodeSortDirection { - NewestFirst, - OldestFirst, - ShortestFirst, - LongestFirst, - TitleAZ, - TitleZA, -} - -#[function_component(EpisodeLayout)] -pub fn episode_layout() -> Html { - let is_added = use_state(|| false); - let (state, _dispatch) = use_store::(); - let (search_state, _search_dispatch) = use_store::(); - let podcast_feed_results = 
search_state.podcast_feed_results.clone(); - let clicked_podcast_info = search_state.clicked_podcast_info.clone(); - let loading = use_state(|| true); - let page_state = use_state(|| PageState::Hidden); - let episode_search_term = use_state(|| String::new()); - - // Initialize sort direction from local storage or default to newest first - let episode_sort_direction = use_state(|| { - let saved_preference = get_filter_preference("episodes"); - match saved_preference.as_deref() { - Some("newest") => Some(EpisodeSortDirection::NewestFirst), - Some("oldest") => Some(EpisodeSortDirection::OldestFirst), - Some("shortest") => Some(EpisodeSortDirection::ShortestFirst), - Some("longest") => Some(EpisodeSortDirection::LongestFirst), - Some("title_az") => Some(EpisodeSortDirection::TitleAZ), - Some("title_za") => Some(EpisodeSortDirection::TitleZA), - _ => Some(EpisodeSortDirection::NewestFirst), // Default to newest first - } - }); - - let completed_filter_state = use_state(|| CompletedFilter::ShowAll); - let show_in_progress = use_state(|| false); - let notification_status = use_state(|| false); - let feed_cutoff_days = use_state(|| 0); - let feed_cutoff_days_input = use_state(|| "0".to_string()); - let playback_speed = use_state(|| 1.0); - let playback_speed_input = playback_speed.clone(); - let playback_speed_clone = playback_speed.clone(); - let rss_key_state = use_state(|| None::); - - // Bulk selection state - let selected_episodes = use_state(|| HashSet::::new()); - let is_selecting = use_state(|| false); - - let history = BrowserHistory::new(); - // let node_ref = use_node_ref(); - let user_id = search_state - .user_details - .as_ref() - .map(|ud| ud.UserID.clone()); - let api_key = search_state - .auth_details - .as_ref() - .map(|ud| ud.api_key.clone()); - let server_name = search_state - .auth_details - .as_ref() - .map(|ud| ud.server_name.clone()); - let podcast_added = search_state.podcast_added.unwrap_or_default(); - let pod_url = use_state(|| String::new()); - 
let new_category = use_state(|| String::new()); - - let new_cat_in = new_category.clone(); - let new_category_input = Callback::from(move |e: InputEvent| { - if let Some(input_element) = e.target_dyn_into::() { - let value = input_element.value(); // Get the value as a String - new_cat_in.set(value); // Set the state with the String - } - }); - - // Add this near the start of the component - let audio_dispatch = _dispatch.clone(); - - // Clear podcast metadata when component mounts - use_effect_with((), move |_| { - audio_dispatch.reduce_mut(|state| { - state.podcast_value4value = None; - state.podcast_funding = None; - state.podcast_podroll = None; - state.podcast_people = None; - }); - || () - }); - - { - let audio_dispatch = _dispatch.clone(); - - // Initial check when the component is mounted - { - let window = window().unwrap(); - let width = window.inner_width().unwrap().as_f64().unwrap(); - let new_is_mobile = width < 768.0; - audio_dispatch.reduce_mut(|state| state.is_mobile = Some(new_is_mobile)); - } - - // Resize event listener - use_effect_with((), move |_| { - let window = window().unwrap(); - let closure_window = window.clone(); - let closure = Closure::wrap(Box::new(move || { - let width = closure_window.inner_width().unwrap().as_f64().unwrap(); - let new_is_mobile = width < 768.0; - audio_dispatch.reduce_mut(|state| state.is_mobile = Some(new_is_mobile)); - }) as Box); - - window - .add_event_listener_with_callback("resize", closure.as_ref().unchecked_ref()) - .unwrap(); - - closure.forget(); // Ensure the closure is not dropped prematurely - - || () - }); - } - - // On mount, check if the podcast is in the database - let effect_user_id = user_id.clone(); - let effect_api_key = api_key.clone(); - let loading_ep = loading.clone(); - - { - let is_added = is_added.clone(); - let podcast = clicked_podcast_info.clone(); - let user_id = effect_user_id.clone(); - let api_key = effect_api_key.clone(); - let server_name = server_name.clone(); - let 
click_dispatch = _search_dispatch.clone(); - let click_history = history.clone(); - let pod_load_url = pod_url.clone(); - let pod_loading_ep = loading.clone(); - - fn emit_click(callback: Callback) { - callback.emit(MouseEvent::new("click").unwrap()); - } - - use_effect_with( - (api_key.clone(), user_id.clone(), server_name.clone()), - move |(api_key, user_id, server_name)| { - if let (Some(api_key), Some(user_id), Some(server_name)) = - (api_key.clone(), user_id.clone(), server_name.clone()) - { - let is_added = is_added.clone(); - - if podcast.is_none() { - let window = web_sys::window().expect("no global window exists"); - let search_params = window.location().search().unwrap(); - let url_params = UrlSearchParams::new_with_str(&search_params).unwrap(); - - let podcast_title = url_params.get("podcast_title").unwrap_or_default(); - let podcast_url = url_params.get("podcast_url").unwrap_or_default(); - let podcast_index_id = 0; - if !podcast_title.is_empty() && !podcast_url.is_empty() { - let podcast_info = ClickedFeedURL { - podcastid: 0, - podcastname: podcast_title.clone(), - feedurl: podcast_url.clone(), - description: String::new(), - author: String::new(), - artworkurl: String::new(), - explicit: false, - episodecount: 0, - categories: None, - websiteurl: String::new(), - podcastindexid: podcast_index_id, - is_youtube: Some(false), - }; - - let api_key = api_key.clone(); - let user_id = user_id.clone(); - let server_name = server_name.clone(); - spawn_local(async move { - let added = call_check_podcast( - &server_name, - &api_key.clone().unwrap(), - user_id, - podcast_info.podcastname.as_str(), - podcast_info.feedurl.as_str(), - ) - .await - .unwrap_or_default() - .exists; - is_added.set(added); - - let podcast_details = call_get_podcast_details_dynamic( - &server_name, - &api_key.clone().unwrap(), - user_id, - podcast_info.podcastname.as_str(), - podcast_info.feedurl.as_str(), - podcast_info.podcastindexid, - added, - Some(false), - ) - .await - .unwrap(); - 
- fn categories_to_string( - categories: Option>, - ) -> Option { - categories.map(|map| { - map.values().cloned().collect::>().join(", ") - }) - } - let podcast_categories_str = - categories_to_string(podcast_details.details.categories); - - // Execute the same process as when a podcast is clicked - let on_title_click = create_on_title_click( - click_dispatch, - server_name, - Some(Some(api_key.clone().unwrap())), - &click_history, - podcast_details.details.podcastindexid, - podcast_details.details.podcastname, - podcast_details.details.feedurl, - podcast_details.details.description, - podcast_details.details.author, - podcast_details.details.artworkurl, - podcast_details.details.explicit, - podcast_details.details.episodecount, - podcast_categories_str, // assuming no categories in local storage - podcast_details.details.websiteurl, - user_id, - podcast_details.details.is_youtube.unwrap(), - ); - emit_click(on_title_click); - let window = web_sys::window().expect("no global window exists"); - let location = window.location(); - - let mut new_url = location.origin().unwrap(); - new_url.push_str(&location.pathname().unwrap()); - new_url.push_str("?podcast_title="); - new_url.push_str(&urlencoding::encode(&podcast_info.podcastname)); - new_url.push_str("&podcast_url="); - new_url.push_str(&urlencoding::encode(&podcast_info.feedurl)); - pod_load_url.set(new_url.clone()); - }); - } - } else { - let podcast = podcast.unwrap(); - - // Update the URL with query parameters - let window = web_sys::window().expect("no global window exists"); - let history = window.history().expect("should have a history"); - let location = window.location(); - - let mut new_url = location.origin().unwrap(); - new_url.push_str(&location.pathname().unwrap()); - new_url.push_str("?podcast_title="); - new_url.push_str(&urlencoding::encode(&podcast.podcastname)); - new_url.push_str("&podcast_url="); - new_url.push_str(&urlencoding::encode(&podcast.feedurl)); - - history - 
.replace_state_with_url( - &wasm_bindgen::JsValue::NULL, - "", - Some(&new_url), - ) - .expect("should push state"); - - let api_key = api_key.clone(); - let user_id = user_id.clone(); - let server_name = server_name.clone(); - spawn_local(async move { - let added = call_check_podcast( - &server_name, - &api_key.unwrap(), - user_id, - podcast.podcastname.as_str(), - podcast.feedurl.as_str(), - ) - .await - .unwrap_or_default() - .exists; - is_added.set(added); - if *is_added.clone() != true { - pod_loading_ep.set(false); - } - }); - } - } - || () - }, - ); - } - - let podcast_info = search_state.clicked_podcast_info.clone(); - let load_link = loading.clone(); - - use_effect_with(podcast_info.clone(), { - let pod_url = pod_url.clone(); - move |podcast_info| { - if let Some(info) = podcast_info { - let window = window().expect("no global window exists"); - let history = window.history().expect("should have a history"); - let location = window.location(); - - let mut new_url = location.origin().unwrap(); - new_url.push_str(&location.pathname().unwrap()); - new_url.push_str("?podcast_title="); - new_url.push_str(&urlencoding::encode(&info.podcastname)); - new_url.push_str("&podcast_url="); - new_url.push_str(&urlencoding::encode(&info.feedurl)); - pod_url.set(new_url.clone()); - load_link.set(false); - - history - .replace_state_with_url(&JsValue::NULL, "", Some(&new_url)) - .expect("should push state"); - } - || {} - } - }); - - let download_status = use_state(|| false); - let podcast_id = use_state(|| 0); - let start_skip = use_state(|| 0); - let end_skip = use_state(|| 0); - - { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let podcast_id = podcast_id.clone(); - let download_status = download_status.clone(); - let notification_effect = notification_status.clone(); - // let episode_name = episode_name_pre.clone(); - // let episode_url = episode_url_pre.clone(); - let user_id = search_state.user_details.as_ref().map(|ud| ud.UserID); - let 
effect_start_skip = start_skip.clone(); - let effect_end_skip = end_skip.clone(); - let effect_playback_speed = playback_speed.clone(); - let effect_added = is_added.clone(); - let feed_cutoff_days = feed_cutoff_days.clone(); - let feed_cutoff_days_input = feed_cutoff_days_input.clone(); - let audio_dispatch = _dispatch.clone(); - let click_state = search_state.clone(); - - use_effect_with( - ( - click_state.podcast_feed_results.clone(), - effect_added.clone(), - ), - move |_| { - let episode_name: Option = click_state - .podcast_feed_results - .as_ref() - .and_then(|results| results.episodes.get(0)) - .and_then(|episode| episode.title.clone()); - let episode_url: Option = click_state - .podcast_feed_results - .as_ref() - .and_then(|results| results.episodes.get(0)) - .and_then(|episode| episode.enclosure_url.clone()); - - let bool_true = *effect_added; // Dereference here - - if !bool_true { - } else { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let podcast_id = podcast_id.clone(); - let download_status = download_status.clone(); - let episode_name = episode_name; - let episode_url = episode_url; - let user_id = user_id.unwrap(); - - if episode_name.is_some() && episode_url.is_some() { - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = - (api_key.as_ref(), server_name.as_ref()) - { - match call_get_podcast_id_from_ep_name( - &server_name, - &api_key, - episode_name.unwrap(), - episode_url.unwrap(), - user_id, - ) - .await - { - Ok(id) => { - podcast_id.set(id); - - match call_get_auto_download_status( - &server_name, - user_id, - &Some(api_key.clone().unwrap()), - id, - ) - .await - { - Ok(status) => { - download_status.set(status); - } - Err(e) => { - web_sys::console::log_1( - &format!( - "Error getting auto-download status: {}", - e - ) - .into(), - ); - } - } - match call_get_feed_cutoff_days( - &server_name, - &Some(api_key.clone().unwrap()), - id, - user_id, - ) - .await - { - 
Ok(days) => { - feed_cutoff_days.set(days); - feed_cutoff_days_input.set(days.to_string()); - } - Err(e) => { - web_sys::console::log_1( - &format!( - "Error getting feed cutoff days: {}", - e - ) - .into(), - ); - } - } - // Add notification status check here - match call_get_podcast_notifications_status( - server_name.clone(), - api_key.clone().unwrap(), - user_id, - id, - ) - .await - { - Ok(status) => { - notification_effect.set(status); - } - Err(e) => { - web_sys::console::log_1( - &format!( - "Error getting notification status: {}", - e - ) - .into(), - ); - } - } - match call_get_play_episode_details( - &server_name, - &Some(api_key.clone().unwrap()), - user_id, - id, // podcast_id - false, // is_youtube (probably false for most podcasts, adjust if needed) - ) - .await - { - Ok((speed, start, end)) => { - effect_start_skip.set(start); - effect_end_skip.set(end); - effect_playback_speed.set(speed as f64); - } - Err(e) => { - web_sys::console::log_1( - &format!( - "Error getting auto-skip times: {}", - e - ) - .into(), - ); - } - } - loading_ep.set(false); - let chap_request = FetchPodcasting2PodDataRequest { - podcast_id: id, - user_id, - }; - match call_fetch_podcasting_2_pod_data( - &server_name, - &api_key, - &chap_request, - ) - .await - { - Ok(response) => { - // let chapters = response.chapters.clone(); // Clone chapters to avoid move issue - let value = response.value.clone(); - let funding = response.funding.clone(); - let podroll = response.podroll.clone(); - let people = response.people.clone(); - audio_dispatch.reduce_mut(|state| { - state.podcast_value4value = Some(value); - state.podcast_funding = Some(funding); - state.podcast_podroll = Some(podroll); - state.podcast_people = Some(people); - }); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error fetching 2.0 data: {}", e) - .into(), - ); - } - } - } - Err(e) => { - web_sys::console::log_1( - &format!("Error getting podcast ID: {}", e).into(), - ); - } - } - } - }); - } - } - || () - 
}, - ); - } - - let open_in_new_tab = Callback::from(move |url: String| { - let window = web_sys::window().unwrap(); - window.open_with_url_and_target(&url, "_blank").unwrap(); - }); - - // Function to handle link clicks - let history_handle = history.clone(); - let handle_click = Callback::from(move |event: MouseEvent| { - if let Some(target) = event.target_dyn_into::() { - if let Some(href) = target.get_attribute("href") { - event.prevent_default(); - if href.starts_with("http") { - // External link, open in a new tab - web_sys::window() - .unwrap() - .open_with_url_and_target(&href, "_blank") - .unwrap(); - } else { - // Internal link, use Yew Router to navigate - history_handle.push(href); - } - } - } - }); - - let node_ref = use_node_ref(); - - use_effect_with((), move |_| { - if let Some(container) = node_ref.cast::() { - if let Ok(links) = container.query_selector_all("a") { - for i in 0..links.length() { - if let Some(link) = links.item(i) { - let link = link.dyn_into::().unwrap(); - let handle_click_clone = handle_click.clone(); - let listener = - gloo_events::EventListener::new(&link, "click", move |event| { - handle_click_clone - .emit(event.clone().dyn_into::().unwrap()); - }); - listener.forget(); // Prevent listener from being dropped - } - } - } - } - - || () - }); - - let delete_history = history.clone(); - let delete_all_click = { - let add_dispatch = _search_dispatch.clone(); - let pod_values = clicked_podcast_info.clone(); - - let user_id_og = user_id.clone(); - - let api_key_clone = api_key.clone(); - let server_name_clone = server_name.clone(); - let app_dispatch = _search_dispatch.clone(); - let call_is_added = is_added.clone(); - let page_state = page_state.clone(); - - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let hist = delete_history.clone(); - let page_state = page_state.clone(); - let pod_title_og = pod_values.clone().unwrap().podcastname.clone(); - let pod_feed_url_og = pod_values.clone().unwrap().feedurl.clone(); - 
let is_youtube = pod_values.clone().unwrap().is_youtube.unwrap_or(false); - app_dispatch.reduce_mut(|state| state.is_loading = Some(true)); - let is_added_inner = call_is_added.clone(); - let call_dispatch = add_dispatch.clone(); - let pod_title = pod_title_og.clone(); - let pod_title_yt = pod_title_og.clone(); - let pod_feed_url = pod_feed_url_og.clone(); - let pod_feed_url_yt = pod_feed_url_og.clone(); - let pod_feed_url_check = pod_feed_url_og.clone(); - let user_id = user_id_og.clone().unwrap(); - let podcast_values = RemovePodcastValuesName { - podcast_name: pod_title, - podcast_url: pod_feed_url, - user_id, - }; - let remove_channel = RemoveYouTubeChannelValues { - user_id, - channel_name: pod_title_yt, - channel_url: pod_feed_url_yt, - }; - let api_key_call = api_key_clone.clone(); - let server_name_call = server_name_clone.clone(); - let app_dispatch = app_dispatch.clone(); - wasm_bindgen_futures::spawn_local(async move { - let dispatch_wasm = call_dispatch.clone(); - let api_key_wasm = api_key_call.clone().unwrap(); - let server_name_wasm = server_name_call.clone(); - - let result = if pod_feed_url_check.starts_with("https://www.youtube.com") { - call_remove_youtube_channel( - &server_name_wasm.unwrap(), - &api_key_wasm, - &remove_channel, - ) - .await - } else { - call_remove_podcasts_name( - &server_name_wasm.unwrap(), - &api_key_wasm, - &podcast_values, - ) - .await - }; - - match result { - Ok(success) => { - if success { - dispatch_wasm.reduce_mut(|state| { - state.info_message = Some( - if pod_feed_url_check.starts_with("https://www.youtube.com") { - "YouTube channel successfully removed".to_string() - } else { - "Podcast successfully removed".to_string() - }, - ) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - is_added_inner.set(false); - app_dispatch.reduce_mut(|state| { - state.podcast_added = Some(podcast_added); - }); - - if pod_feed_url_check.starts_with("https://www.youtube.com") { - hist.push("/podcasts"); - } - } 
else { - dispatch_wasm.reduce_mut(|state| { - state.error_message = Some(if is_youtube { - "Failed to remove YouTube channel".to_string() - } else { - "Failed to remove podcast".to_string() - }) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - } - page_state.set(PageState::Hidden); - } - Err(e) => { - let formatted_error = format_error_message(&e.to_string()); - dispatch_wasm.reduce_mut(|state| { - state.error_message = - Some(format!("Error removing content: {:?}", formatted_error)) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - } - } - }); - }) - }; - - let download_server_name = server_name.clone(); - let download_api_key = api_key.clone(); - let download_dispatch = _search_dispatch.clone(); - let app_state = search_state.clone(); - - let download_all_click = { - let call_dispatch = download_dispatch.clone(); - let server_name_copy = download_server_name.clone(); - let api_key_copy = download_api_key.clone(); - let user_id_copy = user_id.clone(); - let search_call_state = app_state.clone(); - - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let server_name = server_name_copy.clone(); - let api_key = api_key_copy.clone(); - let search_state = search_call_state.clone(); - let call_down_dispatch = call_dispatch.clone(); - wasm_bindgen_futures::spawn_local(async move { - let episode_id = match search_state - .podcast_feed_results - .as_ref() - .and_then(|results| results.episodes.get(0)) - .and_then(|episode| episode.episode_id) - { - Some(id) => id, - None => { - eprintln!("No episode_id found"); - return; - } - }; - let is_youtube = match search_state - .podcast_feed_results - .as_ref() - .and_then(|results| results.episodes.get(0)) - .and_then(|episode| episode.is_youtube) - { - Some(id) => id, - None => { - eprintln!("No is_youtube info found"); - return; - } - }; - let ep_api_key = api_key.clone(); - let ep_server_name = server_name.clone(); - let ep_user_id = user_id_copy.clone(); - match 
call_get_podcast_id_from_ep( - &ep_server_name.unwrap(), - &ep_api_key.unwrap(), - episode_id, - ep_user_id.unwrap(), - Some(is_youtube), - ) - .await - { - Ok(podcast_id) => { - let request = DownloadAllPodcastRequest { - podcast_id, - user_id: user_id_copy.unwrap(), - }; - - match call_download_all_podcast( - &server_name.unwrap(), - &api_key.flatten(), - &request, - ) - .await - { - Ok(success_message) => { - call_down_dispatch.reduce_mut(|state| { - state.info_message = - Option::from(format!("{}", success_message)) - }); - } - Err(e) => { - let formatted_error = format_error_message(&e.to_string()); - call_down_dispatch.reduce_mut(|state| { - state.error_message = - Option::from(format!("{}", formatted_error)) - }); - } - } - } - Err(e) => { - call_down_dispatch.reduce_mut(|state| { - let formatted_error = format_error_message(&e.to_string()); - state.error_message = Option::from(format!( - "Failed to get podcast ID: {}", - formatted_error - )) - }); - } - } - }); - }) - }; - - // Define the state of the application - #[derive(Clone, PartialEq)] - enum PageState { - Hidden, - Shown, - Download, - Delete, - RSSFeed, - } - - let button_content = if *is_added { trash_icon() } else { add_icon() }; - - let setting_content = if *is_added { - settings_icon() - } else { - no_icon() - }; - let download_all = if *is_added { - download_icon() - } else { - no_icon() - }; - - let payment_icon = { payments_icon() }; - let rss_icon = { rss_icon() }; - - let website_icon = { website_icon() }; - - let on_close_modal = { - let page_state = page_state.clone(); - Callback::from(move |_| { - page_state.set(PageState::Hidden); - }) - }; - - let on_background_click = { - let on_close_modal = on_close_modal.clone(); - Callback::from(move |e: MouseEvent| { - let target = e.target().unwrap(); - let element = target.dyn_into::().unwrap(); - if element.tag_name() == "DIV" { - on_close_modal.emit(e); - } - }) - }; - - let stop_propagation = Callback::from(|e: MouseEvent| { - 
e.stop_propagation(); - }); - - let toggle_download = { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let download_status = download_status.clone(); - let podcast_id = podcast_id.clone(); - let user_id = user_id.clone(); - - Callback::from(move |_| { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let download_status = download_status.clone(); - let auto_download = !*download_status; - let pod_id_deref = *podcast_id.clone(); - let user_id = user_id.clone().unwrap(); - - let request_data = AutoDownloadRequest { - podcast_id: pod_id_deref, // Replace with the actual podcast ID - user_id, - auto_download, - }; - - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = (api_key.as_ref(), server_name.as_ref()) - { - match call_enable_auto_download( - &server_name, - &api_key.clone().unwrap(), - &request_data, - ) - .await - { - Ok(_) => { - download_status.set(auto_download); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error enabling/disabling downloads: {}", e).into(), - ); - } - } - } - }); - }) - }; - - let playback_speed_input_handler = Callback::from(move |e: InputEvent| { - if let Some(input) = e.target_dyn_into::() { - let value = input.value().parse::().unwrap_or(1.0); - // Constrain to reasonable values (0.5 to 3.0) - let value = value.max(0.5).min(2.0); - playback_speed_input.set(value); - } - }); - - // Create the save playback speed function - let save_playback_speed = { - let playback_speed = playback_speed.clone(); - let api_key = api_key.clone(); - let user_id = user_id.clone(); - let server_name = server_name.clone(); - let podcast_id = podcast_id.clone(); - let dispatch = _search_dispatch.clone(); - - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let call_dispatch = dispatch.clone(); - let speed = *playback_speed; - let api_key = api_key.clone(); - let user_id = user_id.clone().unwrap(); - let server_name = server_name.clone(); - 
let podcast_id = *podcast_id; - - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = (api_key.as_ref(), server_name.as_ref()) - { - let request = PlaybackSpeedRequest { - podcast_id, - user_id, - playback_speed: speed, - }; - - match call_set_playback_speed(&server_name, &api_key, &request).await { - Ok(_) => { - call_dispatch.reduce_mut(|state| { - state.info_message = - Option::from("Playback speed updated".to_string()) - }); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error updating playback speed: {}", e).into(), - ); - call_dispatch.reduce_mut(|state| { - state.error_message = - Option::from("Error updating playback speed".to_string()) - }); - } - } - } - }); - }) - }; - - // Create the clear playback speed function - let clear_playback_speed = { - let api_key = api_key.clone(); - let user_id = user_id.clone(); - let server_name = server_name.clone(); - let podcast_id = podcast_id.clone(); - let dispatch = _search_dispatch.clone(); - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let call_dispatch = dispatch.clone(); - let api_key = api_key.clone(); - let user_id = user_id.clone().unwrap(); - let server_name = server_name.clone(); - let podcast_id = *podcast_id; - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = (api_key.as_ref(), server_name.as_ref()) - { - let request = ClearPlaybackSpeedRequest { - podcast_id, - user_id, - }; - match call_clear_playback_speed(&server_name, &api_key, &request).await { - Ok(_) => { - call_dispatch.reduce_mut(|state| { - state.info_message = - Option::from("Playback speed reset to default".to_string()) - }); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error resetting playback speed: {}", e).into(), - ); - call_dispatch.reduce_mut(|state| { - state.error_message = - Option::from("Error resetting playback speed".to_string()) - }); - } - } - } - }); - }) - }; - - // Add this callback for handling input 
changes - let feed_cutoff_days_input_handler = { - let feed_cutoff_days_input = feed_cutoff_days_input.clone(); - - Callback::from(move |e: InputEvent| { - if let Some(input) = e.target_dyn_into::() { - feed_cutoff_days_input.set(input.value()); - } - }) - }; - - // Add this callback for saving the feed cutoff days - let save_feed_cutoff_days = { - let dispatch_vid = _search_dispatch.clone(); - let server_name = server_name.clone(); - let api_key = api_key.clone(); - let podcast_id = podcast_id.clone(); - let feed_cutoff_days_input = feed_cutoff_days_input.clone(); - let feed_cutoff_days = feed_cutoff_days.clone(); - let user_id = search_state.user_details.as_ref().map(|ud| ud.UserID); - - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let dispatch_wasm = dispatch_vid.clone(); - - // Extract the values directly without creating intermediate variables - if let (Some(server_val), Some(key_val), Some(user_val)) = ( - server_name.as_ref(), - api_key.as_ref().and_then(|k| k.as_ref()), - user_id, - ) { - let pod_id = *podcast_id; - let days_str = (*feed_cutoff_days_input).clone(); - let days = days_str.parse::().unwrap_or(0); - let request_data = UpdateFeedCutoffDaysRequest { - podcast_id: pod_id, - user_id: user_val, - feed_cutoff_days: days, - }; - - // Clone everything needed for the async block - let server_val = server_val.clone(); - let key_val = key_val.clone(); - let feed_cutoff_days = feed_cutoff_days.clone(); - - wasm_bindgen_futures::spawn_local(async move { - match call_update_feed_cutoff_days(&server_val, &Some(key_val), &request_data) - .await - { - Ok(_) => { - feed_cutoff_days.set(days); - dispatch_wasm.reduce_mut(|state| { - state.info_message = - Option::from("Youtube Episode Limit Updated!".to_string()) - }); - // No need to update a ClickedFeedURL or PodcastInfo struct - // Just update the state - } - Err(err) => { - web_sys::console::log_1( - &format!("Error updating feed cutoff days: {}", err).into(), - ); - 
dispatch_wasm.reduce_mut(|state| { - state.error_message = Option::from(format!( - "Error updating feed cutoff days: {:?}", - err - )) - }); - } - } - }); - } - }) - }; - - let toggle_notifications = { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let notification_status = notification_status.clone(); - let podcast_id = podcast_id.clone(); - let user_id = user_id.clone(); - Callback::from(move |_| { - let api_key = api_key.clone(); - let server_name = server_name.clone(); - let notification_status = notification_status.clone(); - let enabled = !*notification_status; - let pod_id_deref = *podcast_id.clone(); - let user_id = user_id.clone().unwrap(); - - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = (api_key.as_ref(), server_name.as_ref()) - { - match call_toggle_podcast_notifications( - server_name.clone(), - api_key.clone().unwrap(), - user_id, - pod_id_deref, - enabled, - ) - .await - { - Ok(_) => { - notification_status.set(enabled); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error toggling notifications: {}", e).into(), - ); - } - } - } - }); - }) - }; - - let start_skip_call = start_skip.clone(); - let end_skip_call = end_skip.clone(); - let start_skip_call_button = start_skip.clone(); - let end_skip_call_button = end_skip.clone(); - let skip_dispatch = _search_dispatch.clone(); - - // Save the skip times to the server - let save_skip_times = { - let start_skip = start_skip.clone(); - let end_skip = end_skip.clone(); - let api_key = api_key.clone(); - let user_id = user_id.clone(); - let server_name = server_name.clone(); - let podcast_id = podcast_id.clone(); - let skip_dispatch = skip_dispatch.clone(); - - Callback::from(move |e: MouseEvent| { - e.prevent_default(); - let skip_call_dispatch = skip_dispatch.clone(); - let start_skip = *start_skip; - let end_skip = *end_skip; - let api_key = api_key.clone(); - let user_id = user_id.clone().unwrap(); - let server_name = 
server_name.clone(); - let podcast_id = *podcast_id; - - wasm_bindgen_futures::spawn_local(async move { - if let (Some(api_key), Some(server_name)) = (api_key.as_ref(), server_name.as_ref()) - { - let request = SkipTimesRequest { - podcast_id, - start_skip, - end_skip, - user_id, - }; - - match call_adjust_skip_times(&server_name, &api_key, &request).await { - Ok(_) => { - skip_call_dispatch.reduce_mut(|state| { - state.info_message = Option::from("Skip times Adjusted".to_string()) - }); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error updating skip times: {}", e).into(), - ); - skip_call_dispatch.reduce_mut(|state| { - state.error_message = - Option::from("Error Adjusting Skip Times".to_string()) - }); - } - } - } - }); - }) - }; - - // let onclick_cat = new_category - let app_dispatch_add = _search_dispatch.clone(); - let onclick_add = { - // let dispatch = dispatch.clone(); - let server_name = server_name.clone(); - let api_key = api_key.clone(); - let user_id = user_id.clone(); // Assuming user_id is an Option or similar - let podcast_id = podcast_id.clone(); // Assuming this is available in your context - let new_category = new_category.clone(); // Assuming this is a state that stores the new category input - - Callback::from(move |event: web_sys::MouseEvent| { - event.prevent_default(); // Prevent the default form submit or page reload behavior - let app_dispatch = app_dispatch_add.clone(); - if new_category.is_empty() { - web_sys::console::log_1(&"Category name cannot be empty".into()); - return; - } - - // let dispatch = dispatch.clone(); - let server_name = server_name.clone().unwrap(); - let api_key = api_key.clone().unwrap(); - let user_id = user_id.clone().unwrap(); // Assuming user_id is Some(i32) - let podcast_id = *podcast_id; // Assuming podcast_id is Some(i32) - let category_name = (*new_category).clone(); - let cat_name_dis = category_name.clone(); - - wasm_bindgen_futures::spawn_local(async move { - let request_data = 
AddCategoryRequest { - podcast_id, - user_id, - category: category_name, - }; - - // Await the async function call - let response = call_add_category(&server_name, &api_key, &request_data).await; - - // Match on the awaited response - match response { - Ok(_) => { - app_dispatch.reduce_mut(|state| { - if let Some(ref mut podcast_info) = state.clicked_podcast_info { - if let Some(ref mut categories) = podcast_info.categories { - // Add the new category to the HashMap - categories.insert(cat_name_dis.clone(), cat_name_dis.clone()); - } else { - // Initialize the HashMap if it's None - let mut new_map = HashMap::new(); - new_map.insert(cat_name_dis.clone(), cat_name_dis); - podcast_info.categories = Some(new_map); - } - } - }); - } - Err(err) => { - web_sys::console::log_1(&format!("Error adding category: {}", err).into()); - } - } - }); - }) - }; - - let category_to_remove = use_state(|| None::); - let onclick_remove = { - let category_to_remove = category_to_remove.clone(); - Callback::from(move |event: MouseEvent| { - event.prevent_default(); - let target = event.target_unchecked_into::(); - let closest_button = target.closest("button").unwrap(); - if let Some(button) = closest_button { - if let Some(category) = button.get_attribute("data-category") { - category_to_remove.set(Some(category)); - } - } - }) - }; - - let app_dispatch = _search_dispatch.clone(); - - { - let category_to_remove = category_to_remove.clone(); - let server_name = server_name.clone(); - let api_key = api_key.clone(); - let user_id = user_id; - let podcast_id = *podcast_id; - - use_effect_with(category_to_remove, move |category_to_remove| { - if let Some(category) = (**category_to_remove).clone() { - let server_name = server_name.clone().unwrap(); - let api_key = api_key.clone().unwrap(); - let user_id = user_id.unwrap(); - let category_request = category.clone(); - wasm_bindgen_futures::spawn_local(async move { - let request_data = RemoveCategoryRequest { - podcast_id, - user_id, - category, 
- }; - // Your API call here - let response = - call_remove_category(&server_name, &api_key, &request_data).await; - match response { - Ok(_) => { - app_dispatch.reduce_mut(|state| { - if let Some(ref mut podcast_info) = state.clicked_podcast_info { - if let Some(ref mut categories) = podcast_info.categories { - // Filter the HashMap and collect back into HashMap - *categories = categories - .clone() - .into_iter() - .filter(|(_, cat)| cat != &category_request) // Ensure you're comparing correctly - .collect(); - } - } - }); - } - Err(err) => { - web_sys::console::log_1( - &format!("Error removing category: {}", err).into(), - ); - } - } - }); - } - || () - }); - } - - // Fetch RSS key when RSS feed modal is shown - { - let rss_key_state = rss_key_state.clone(); - let server_name = search_state - .auth_details - .as_ref() - .map(|ud| ud.server_name.clone()); - let api_key = api_key.clone().flatten(); - let page_state_clone = page_state.clone(); - - use_effect_with( - (page_state_clone.clone(), rss_key_state.is_none()), - move |(current_page_state, rss_key_is_none)| { - if matches!(**current_page_state, PageState::RSSFeed) && *rss_key_is_none { - if let (Some(server_name), Some(api_key), Some(user_id)) = - (server_name.clone(), api_key.clone(), user_id.clone()) - { - let rss_key_state = rss_key_state.clone(); - wasm_bindgen_futures::spawn_local(async move { - match call_get_rss_key(&server_name, &Some(api_key), user_id).await { - Ok(rss_key) => { - rss_key_state.set(Some(rss_key)); - } - Err(e) => { - web_sys::console::log_1( - &format!("Failed to fetch RSS key: {}", e).into(), - ); - } - } - }); - } - } - || () - }, - ); - } - - let rss_feed_modal = { - let rss_key_state_clone = rss_key_state.clone(); - - let rss_url = match (*rss_key_state_clone).as_ref() { - Some(rss_key) => format!( - "{}/{}?api_key={}&podcast_id={}", - get_rss_base_url(), - user_id.clone().unwrap_or_default(), - rss_key, - *podcast_id - ), - None => "Loading RSS key...".to_string(), - }; - - 
let copy_onclick = { - let rss_url = rss_url.clone(); - Callback::from(move |_| { - if let Some(window) = web_sys::window() { - let _ = window.navigator().clipboard().write_text(&rss_url); - } - }) - }; - - html! { - - } - }; - - // Define the modal components - let clicked_feed = clicked_podcast_info.clone(); - let podcast_option_model = html! { - - }; - - // Define the modal components - let download_all_model = html! { - - }; - - // Define the modal components - let delete_pod_model = html! { - - }; - - // Define the callback functions - let toggle_settings = { - let page_state = page_state.clone(); - Callback::from(move |_: MouseEvent| { - page_state.set(PageState::Shown); - }) - }; - - let toggle_download = { - let page_state = page_state.clone(); - Callback::from(move |_: MouseEvent| { - page_state.set(PageState::Download); - }) - }; - - let toggle_delete = { - let page_state = page_state.clone(); - Callback::from(move |_: MouseEvent| { - page_state.set(PageState::Delete); - }) - }; - - let toggle_podcast = { - let add_dispatch = _search_dispatch.clone(); - let pod_values = clicked_podcast_info.clone(); - let user_id_og = user_id.clone(); - - let api_key_clone = api_key.clone(); - let server_name_clone = server_name.clone(); - let user_id_clone = user_id.clone(); - let app_dispatch = _search_dispatch.clone(); - - let is_added = is_added.clone(); - let added_id = podcast_id.clone(); - - if *is_added == true { - toggle_delete - } else { - Callback::from(move |_: MouseEvent| { - // Ensure this is triggered only by a MouseEvent - let callback_podcast_id = added_id.clone(); - let podcast_id_og = Some(pod_values.clone().unwrap().podcastid.clone()); - let pod_title_og = pod_values.clone().unwrap().podcastname.clone(); - let pod_artwork_og = pod_values.clone().unwrap().artworkurl.clone(); - let pod_author_og = pod_values.clone().unwrap().author.clone(); - let categories_og = pod_values.clone().unwrap().categories.unwrap().clone(); - let pod_description_og = 
pod_values.clone().unwrap().description.clone(); - let pod_episode_count_og = pod_values.clone().unwrap().episodecount.clone(); - let pod_feed_url_og = pod_values.clone().unwrap().feedurl.clone(); - let pod_website_og = pod_values.clone().unwrap().websiteurl.clone(); - let pod_explicit_og = pod_values.clone().unwrap().explicit.clone(); - let app_dispatch = app_dispatch.clone(); - app_dispatch.reduce_mut(|state| state.is_loading = Some(true)); - let is_added_inner = is_added.clone(); - let call_dispatch = add_dispatch.clone(); - let pod_title = pod_title_og.clone(); - let pod_artwork = pod_artwork_og.clone(); - let pod_author = pod_author_og.clone(); - let categories = categories_og.clone(); - let pod_description = pod_description_og.clone(); - let pod_episode_count = pod_episode_count_og.clone(); - let pod_feed_url = pod_feed_url_og.clone(); - let pod_website = pod_website_og.clone(); - let pod_explicit = pod_explicit_og.clone(); - let user_id = user_id_og.clone().unwrap(); - let podcast_values = PodcastValues { - pod_title, - pod_artwork, - pod_author, - categories, - pod_description, - pod_episode_count, - pod_feed_url, - pod_website, - pod_explicit, - user_id, - }; - let api_key_call = api_key_clone.clone(); - let server_name_call = server_name_clone.clone(); - let user_id_call = user_id_clone.clone(); - - wasm_bindgen_futures::spawn_local(async move { - let dispatch_wasm = call_dispatch.clone(); - let api_key_wasm = api_key_call.clone().unwrap(); - let user_id_wasm = user_id_call.clone().unwrap(); - let server_name_wasm = server_name_call.clone(); - let pod_values_clone = podcast_values.clone(); // Make sure you clone the podcast values - - match call_add_podcast( - &server_name_wasm.clone().unwrap(), - &api_key_wasm, - user_id_wasm, - &pod_values_clone, - podcast_id_og, - ) - .await - { - Ok(response_body) => { - // Replace the problematic sections in episodes_layout.rs with this code: - - // First issue: response_body.podcast_id is now i32, not Option - if 
response_body.success { - dispatch_wasm.reduce_mut(|state| { - state.info_message = - Option::from("Podcast successfully added".to_string()) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - is_added_inner.set(true); - - // podcast_id is now directly an i32, not an Option - let call_podcast_id = response_body.podcast_id; - callback_podcast_id.set(call_podcast_id); - - // Since first_episode_id is now an i32, use it directly - let episode_id = Some(response_body.first_episode_id); - // Use the episode_id for further processing - app_dispatch.reduce_mut(|state| { - state.selected_episode_id = episode_id; - // Now this matches Option - }); - - // Fetch episodes - podcast_id is now direct i32 - match call_get_podcast_episodes( - &server_name_wasm.unwrap(), - &api_key_wasm, - &user_id_wasm, - &call_podcast_id, - ) - .await - { - Ok(podcast_feed_results) => { - app_dispatch.reduce_mut(move |state| { - state.podcast_feed_results = Some(podcast_feed_results); - }); - app_dispatch - .reduce_mut(|state| state.is_loading = Some(false)); - } - Err(e) => { - web_sys::console::log_1( - &format!("Error fetching episodes: {:?}", e).into(), - ); - } - } - - app_dispatch.reduce_mut(|state| { - state.podcast_added = Some(podcast_added); - }); - } else { - dispatch_wasm.reduce_mut(|state| { - state.error_message = - Option::from("Failed to add podcast".to_string()) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - } - } - Err(e) => { - let formatted_error = format_error_message(&e.to_string()); - dispatch_wasm.reduce_mut(|state| { - state.error_message = Option::from(format!( - "Error adding podcast: {:?}", - formatted_error - )) - }); - app_dispatch.reduce_mut(|state| state.is_loading = Some(false)); - } - } - }); - }) - } - }; - - #[derive(Clone, PartialEq)] - enum CompletedFilter { - ShowAll, - ShowOnly, - Hide, - } - - let filtered_episodes = use_memo( - ( - podcast_feed_results.clone(), - episode_search_term.clone(), - 
episode_sort_direction.clone(), - completed_filter_state.clone(), // Changed from show_completed - show_in_progress.clone(), - ), - |(episodes, search, sort_dir, _show_completed, show_in_progress)| { - if let Some(results) = episodes { - let mut filtered = results - .episodes - .iter() - .filter(|episode| { - // Search filter - let matches_search = if !search.is_empty() { - episode.title.as_ref().map_or(false, |title| { - title.to_lowercase().contains(&search.to_lowercase()) - }) || episode.description.as_ref().map_or(false, |desc| { - desc.to_lowercase().contains(&search.to_lowercase()) - }) - } else { - true - }; - - // Status filter - let matches_status = if **show_in_progress { - !episode.completed.unwrap_or(false) - && episode.listen_duration.unwrap_or(0) > 0 - } else { - match *completed_filter_state { - CompletedFilter::ShowOnly => episode.completed.unwrap_or(false), - CompletedFilter::Hide => !episode.completed.unwrap_or(false), - CompletedFilter::ShowAll => true, - } - }; - - matches_search && matches_status - }) - .cloned() - .collect::>(); - - // Sort logic - if let Some(direction) = (*sort_dir).as_ref() { - filtered.sort_by(|a, b| match direction { - EpisodeSortDirection::NewestFirst => b.pub_date.cmp(&a.pub_date), - EpisodeSortDirection::OldestFirst => a.pub_date.cmp(&b.pub_date), - EpisodeSortDirection::ShortestFirst => a.duration.cmp(&b.duration), - EpisodeSortDirection::LongestFirst => b.duration.cmp(&a.duration), - EpisodeSortDirection::TitleAZ => a.title.cmp(&b.title), - EpisodeSortDirection::TitleZA => b.title.cmp(&a.title), - }); - } - filtered - } else { - vec![] - } - }, - ); - - #[wasm_bindgen] - extern "C" { - #[wasm_bindgen(js_namespace = window)] - fn toggle_description(guid: &str); - } - - let web_link = open_in_new_tab.clone(); - let pod_layout_data = clicked_podcast_info.clone(); - - - let (completed_icon, completed_text, completed_title) = match *completed_filter_state { - CompletedFilter::ShowOnly => ( - "ph-check-circle", - "Show 
Only", - "Showing only completed episodes", - ), - CompletedFilter::Hide => ("ph-x-circle", "Hide", "Hiding completed episodes"), - CompletedFilter::ShowAll => ("ph-circle", "All", "Showing all episodes"), - }; - - html! { -
- - - { - match *page_state { - PageState::Shown => podcast_option_model, - PageState::Download => download_all_model, - PageState::Delete => delete_pod_model, - PageState::RSSFeed => rss_feed_modal, - _ => html! {}, - } - } - { - if *loading { // If loading is true, display the loading animation - html! { -
-
-
-
-
-
-
-
- } - } else { - html! { - <> - { - if let Some(podcast_info) = pod_layout_data { - let sanitized_title = podcast_info.podcastname.replace(|c: char| !c.is_alphanumeric(), "-"); - let desc_id = format!("desc-{}", sanitized_title); - let pod_link = podcast_info.websiteurl.clone(); - - let toggle_description = { - let desc_id = desc_id.clone(); - Callback::from(move |_| { - let desc_id = desc_id.clone(); - wasm_bindgen_futures::spawn_local(async move { - let window = web_sys::window().expect("no global `window` exists"); - let function = window - .get("toggle_description") - .expect("should have `toggle_description` as a function") - .dyn_into::() - .unwrap(); - let this = JsValue::NULL; - let guid = JsValue::from_str(&desc_id); - function.call1(&this, &guid).unwrap(); - }); - }) - }; - let sanitized_description = sanitize_html(&podcast_info.description); - let layout = if state.is_mobile.unwrap_or(false) { - html! { -
-
- - { - if let Some(funding_list) = &state.podcast_funding { - if !funding_list.is_empty() { - let funding_list_clone = funding_list.clone(); - html! { - <> - { for funding_list_clone.iter().map(|funding| { - let open_in_new_tab = open_in_new_tab.clone(); - let payment_icon = payment_icon.clone(); - let url = funding.url.clone(); - html! { - - } - })} - - } - } else { - html! {} - } - } else { - html! {} - } - } - { - if search_state.podcast_added.unwrap() { - html! { - - } - } else { - html! {} - } - } - { - if search_state.podcast_added.unwrap() { - html! { - - } - } else { - html! {} - } - } - - { - if search_state.podcast_added.unwrap() { - html! { - - } - } else { - html! {} - } - } -
-
- -
- -

{ &podcast_info.podcastname }

-
- { sanitized_description } - -
-

{ format!("Episode Count: {}", &podcast_info.episodecount) }

-

{ format!("Authors: {}", &podcast_info.author) }

-

{ format!("Explicit: {}", if podcast_info.explicit { "Yes" } else { "No" }) }

- { - if !podcast_info.is_youtube.unwrap_or(false) { // Only show if not a YouTube channel - if let Some(people) = &state.podcast_people { - if !people.is_empty() { - html! { -
-
- -
-
- } - } else { - html! {} - } - } else { - html! {} - } - } else { - html! {} - } - } -
-
- { - if let Some(categories) = &podcast_info.categories { - html! { - for categories.iter().map(|(_, category_name)| { - html! { { category_name } } - }) - } - } else { - html! {} - } - } -
- -
-
- } - } else { - let pod_link = podcast_info.feedurl.clone(); - html! { -
- -
-
-

{ &podcast_info.podcastname }

- { - if search_state.podcast_added.unwrap() { - html! { - - } - } else { - html! {} - } - } - - { - if search_state.podcast_added.unwrap() { - html! { - - } - } else { - html! {} - } - } -
- - //

{ &podcast_info.podcast_description }

-
- { sanitized_description } - -
- - { - if let Some(funding_list) = &state.podcast_funding { - if !funding_list.is_empty() { - let funding_list_clone = funding_list.clone(); - html! { - <> - { for funding_list_clone.iter().map(|funding| { - let open_in_new_tab = open_in_new_tab.clone(); - let payment_icon = payment_icon.clone(); - let url = funding.url.clone(); - html! { - - } - })} - - } - } else { - html! {} - } - } else { - html! {} - } - } - { - if search_state.podcast_added.unwrap_or(false) { - html! { - - } - } else { - html! {} - } - } -
- -

{ format!("Episode Count: {}", &podcast_info.episodecount) }

-

{ format!("Authors: {}", &podcast_info.author) }

-

{ format!("Explicit: {}", if podcast_info.explicit { "Yes" } else { "No" }) }

- { - if let Some(people) = &state.podcast_people { - if !people.is_empty() { - html! { -
// Added min-width: 0 to allow shrinking -
- -
-
- } - } else { - html! {} - } - } else { - html! {} - } - } -
- { - if let Some(categories) = &podcast_info.categories { - html! { - for categories.values().map(|category_name| { - html! { { category_name } } - }) - } - } else { - html! {} - } - } -
- -
-
-
- } - }; - - layout - } else { - html! {} - } - } - { - // Modern mobile-friendly filter bar - html! { -
- // Combined search and sort bar (seamless design) -
- // Search input (left half) -
- () { - episode_search_term.set(input.value()); - } - }) - } - /> - -
- - // Sort dropdown (right half) -
- - -
-
- - // Filter chips (horizontal scroll on mobile) -
- // Clear all filters - - - // Completed filter chip (3-state) - - - // In progress filter chip - - - // Selection mode toggle - -
- - // Smart selection buttons when in selection mode - { - if *is_selecting { - let filtered_episodes_clone = filtered_episodes.clone(); - let selected_episodes_clone = selected_episodes.clone(); - - html! { -
- // Select All / Deselect All - - - - // Select Unplayed Only - -
- } - } else { - html! {} - } - } -
- } - } - - - // Bulk action toolbar - { - if *is_selecting && !selected_episodes.is_empty() { - let selected_count = selected_episodes.len(); - let selected_ids: Vec = selected_episodes.iter().cloned().collect(); - let user_id_value = user_id.unwrap_or(0); - - html! { -
-
-
- - {format!("{} episode{} selected", selected_count, if selected_count == 1 { "" } else { "s" })} -
-
- // Mark Complete button - - - // Save button - - - // Queue button - - - // Download button - -
-
-
- } - } else { - html! {} - } - } - - { - if let (Some(_), Some(podcast_info)) = (podcast_feed_results, &clicked_podcast_info) { - let podcast_link_clone = podcast_info.feedurl.clone(); - let podcast_title = podcast_info.podcastname.clone(); - - // Episode selection callback - let selected_episodes_clone = selected_episodes.clone(); - let on_episode_select = Callback::from(move |(episode_id, is_selected): (i32, bool)| { - selected_episodes_clone.set({ - let mut current = (*selected_episodes_clone).clone(); - if is_selected { - current.insert(episode_id); - } else { - current.remove(&episode_id); - } - current - }); - }); - - // Select all older episodes callback - let filtered_episodes_older = filtered_episodes.clone(); - let selected_episodes_older = selected_episodes.clone(); - let on_select_older = Callback::from(move |cutoff_episode_id: i32| { - let cutoff_index = filtered_episodes_older.iter() - .position(|ep| ep.episode_id == Some(cutoff_episode_id)) - .unwrap_or(0); - - let older_ids: HashSet = filtered_episodes_older.iter() - .skip(cutoff_index) // Include the cutoff episode and all after it (older in reverse chronological order) - .filter_map(|ep| ep.episode_id) - .collect(); - - selected_episodes_older.set({ - let mut current = (*selected_episodes_older).clone(); - current.extend(older_ids); - current - }); - }); - - // Select all newer episodes callback - let filtered_episodes_newer = filtered_episodes.clone(); - let selected_episodes_newer = selected_episodes.clone(); - let on_select_newer = Callback::from(move |cutoff_episode_id: i32| { - let cutoff_index = filtered_episodes_newer.iter() - .position(|ep| ep.episode_id == Some(cutoff_episode_id)) - .unwrap_or(0); - - let newer_ids: HashSet = filtered_episodes_newer.iter() - .take(cutoff_index + 1) // Include episodes before the cutoff (newer in reverse chronological order) - .filter_map(|ep| ep.episode_id) - .collect(); - - selected_episodes_newer.set({ - let mut current = 
(*selected_episodes_newer).clone(); - current.extend(newer_ids); - current - }); - }); - - html! { - - } - - } else { - html! { -
- -

{ "No Episodes Found" }

-

{"This podcast strangely doesn't have any episodes. Try a more mainstream one maybe?"}

-
- } - } - } - - } - } - } - - { - if let Some(audio_props) = &state.currently_playing { - html! { } - } else { - html! {} - } - } -
- - } -} diff --git a/web/src/components/feed.rs b/web/src/components/feed.rs deleted file mode 100644 index be3f4a2a..00000000 --- a/web/src/components/feed.rs +++ /dev/null @@ -1,498 +0,0 @@ -use super::app_drawer::App_drawer; -use super::gen_components::{ - empty_message, on_shownotes_click, use_long_press, virtual_episode_item, Search_nav, - UseScrollToTop, -}; -use crate::components::audio::on_play_pause; -use crate::components::audio::AudioPlayer; -use crate::components::context::{AppState, ExpandedDescriptions, UIState}; -use crate::components::gen_funcs::{ - format_datetime, match_date_format, parse_date, sanitize_html_with_blank_target, -}; -use crate::requests::pod_req; -use crate::requests::pod_req::Episode as EpisodeData; -use crate::requests::pod_req::RecentEps; -use gloo::events::EventListener; -use wasm_bindgen::JsCast; -use web_sys::window; -use web_sys::{Element, HtmlElement}; -use yew::prelude::*; -use yew::{function_component, html, Html}; -use yew_router::history::BrowserHistory; -use yewdux::prelude::*; - -use wasm_bindgen::prelude::*; - -#[function_component(Feed)] -pub fn feed() -> Html { - let (state, dispatch) = use_store::(); - - let error = use_state(|| None); - let (post_state, _post_dispatch) = use_store::(); - let (audio_state, _audio_dispatch) = use_store::(); - let loading = use_state(|| true); - - // Fetch episodes on component mount - let loading_ep = loading.clone(); - { - // let episodes = episodes.clone(); - let error = error.clone(); - let api_key = post_state - .auth_details - .as_ref() - .map(|ud| ud.api_key.clone()); - let user_id = post_state.user_details.as_ref().map(|ud| ud.UserID.clone()); - let server_name = post_state - .auth_details - .as_ref() - .map(|ud| ud.server_name.clone()); - - let effect_dispatch = dispatch.clone(); - - // fetch_episodes(api_key.flatten(), user_id, server_name, dispatch, error, pod_req::call_get_recent_eps); - - use_effect_with( - (api_key.clone(), user_id.clone(), server_name.clone()), - move 
|_| { - let error_clone = error.clone(); - if let (Some(api_key), Some(user_id), Some(server_name)) = - (api_key.clone(), user_id.clone(), server_name.clone()) - { - let dispatch = effect_dispatch.clone(); - wasm_bindgen_futures::spawn_local(async move { - match pod_req::call_get_recent_eps(&server_name, &api_key, &user_id).await { - Ok(fetched_episodes) => { - let completed_episode_ids: Vec = fetched_episodes - .iter() - .filter(|ep| ep.completed) - .map(|ep| ep.episodeid) - .collect(); - let saved_episode_ids: Vec = fetched_episodes - .iter() - .filter(|ep| ep.saved) - .map(|ep| ep.episodeid) - .collect(); - let queued_episode_ids: Vec = fetched_episodes - .iter() - .filter(|ep| ep.queued) - .map(|ep| ep.episodeid) - .collect(); - let downloaded_episode_ids: Vec = fetched_episodes - .iter() - .filter(|ep| ep.downloaded) - .map(|ep| ep.episodeid) - .collect(); - dispatch.reduce_mut(move |state| { - state.server_feed_results = Some(RecentEps { - episodes: Some(fetched_episodes), - }); - state.completed_episodes = Some(completed_episode_ids); - state.saved_episode_ids = Some(saved_episode_ids); - state.queued_episode_ids = Some(queued_episode_ids); - state.downloaded_episode_ids = Some(downloaded_episode_ids); - }); - loading_ep.set(false); - } - Err(e) => { - error_clone.set(Some(e.to_string())); - loading_ep.set(false); - } - } - }); - } - || () - }, - ); - } - - html! { - <> -
- - - { - if *loading { // If loading is true, display the loading animation - html! { -
-
-
-
-
-
-
-
- } - } else { - if let Some(recent_eps) = state.server_feed_results.clone() { - let int_recent_eps = recent_eps.clone(); - if let Some(episodes) = int_recent_eps.episodes { - - if episodes.is_empty() { - // Render "No Recent Episodes Found" if episodes list is empty - empty_message( - "No Recent Episodes Found", - "You can add new podcasts by using the search bar above. Search for your favorite podcast and click the plus button to add it." - ) - } else { - html! { - - } - } - } else { - empty_message( - "No Recent Episodes Found", - "You can add new podcasts by using the search bar above. Search for your favorite podcast and click the plus button to add it." - ) - } - } else { - empty_message( - "No Recent Episodes Found", - "You can add new podcasts by using the search bar above. Search for your favorite podcast and click the plus button to add it." - ) - } - } - } - { - if let Some(audio_props) = &audio_state.currently_playing { - html! { } - } else { - html! {} - } - } -
- - - } -} - -#[derive(Properties, PartialEq)] -pub struct VirtualListProps { - pub episodes: Vec, - pub page_type: String, -} - -#[function_component(VirtualList)] -pub fn virtual_list(props: &VirtualListProps) -> Html { - let scroll_pos = use_state(|| 0.0); - let container_ref = use_node_ref(); - let container_height = use_state(|| 0.0); - let item_height = use_state(|| 234.0); // Default item height - let force_update = use_state(|| 0); - - // Effect to set initial container height, item height, and listen for window resize - { - let container_height = container_height.clone(); - let item_height = item_height.clone(); - let force_update = force_update.clone(); - - use_effect_with((), move |_| { - let window = window().expect("no global `window` exists"); - let window_clone = window.clone(); - - let update_sizes = Callback::from(move |_| { - let height = window_clone.inner_height().unwrap().as_f64().unwrap(); - container_height.set(height - 100.0); - - let width = window_clone.inner_width().unwrap().as_f64().unwrap(); - // Add 16px (mb-4) to each height value for the virtual list calculations - let new_item_height = if width <= 530.0 { - 122.0 + 16.0 // Base height + margin - } else if width <= 768.0 { - 150.0 + 16.0 // Base height + margin - } else { - 221.0 + 16.0 // Base height + margin - }; - - item_height.set(new_item_height); - force_update.set(*force_update + 1); - }); - - update_sizes.emit(()); - - let listener = EventListener::new(&window, "resize", move |_| { - update_sizes.emit(()); - }); - - move || drop(listener) - }); - } - - // Effect for scroll handling remains the same - { - let scroll_pos = scroll_pos.clone(); - let container_ref = container_ref.clone(); - use_effect_with(container_ref.clone(), move |container_ref| { - let container = container_ref.cast::().unwrap(); - let listener = EventListener::new(&container, "scroll", move |event| { - let target = event.target().unwrap().unchecked_into::(); - scroll_pos.set(target.scroll_top() as f64); - 
}); - move || drop(listener) - }); - } - - let start_index = (*scroll_pos / *item_height).floor() as usize; - let visible_count = ((*container_height / *item_height).ceil() as usize) + 1; - let end_index = (start_index + visible_count).min(props.episodes.len()); - - let visible_episodes = (start_index..end_index) - .map(|index| { - let episode = props.episodes[index].clone(); - html! { - - } - }) - .collect::(); - - let total_height = props.episodes.len() as f64 * *item_height; - let offset_y = start_index as f64 * *item_height; - - html! { -
-
-
- { visible_episodes } -
-
-
- } -} - -#[wasm_bindgen] -extern "C" { - #[wasm_bindgen(js_namespace = window)] - fn toggleDescription(guid: &str, expanded: bool); -} -#[derive(Properties, PartialEq, Clone)] -pub struct EpisodeProps { - pub episode: EpisodeData, - pub page_type: String, // New prop to determine the context (e.g., "home", "saved") -} - -#[function_component(Episode)] -pub fn episode(props: &EpisodeProps) -> Html { - let (state, dispatch) = use_store::(); - let (audio_state, audio_dispatch) = use_store::(); - let (desc_state, desc_dispatch) = use_store::(); - let api_key = state.auth_details.as_ref().map(|ud| ud.api_key.clone()); - let user_id = state.user_details.as_ref().map(|ud| ud.UserID.clone()); - let server_name = state.auth_details.as_ref().map(|ud| ud.server_name.clone()); - let id_string = &props.episode.episodeid.to_string(); - let history = BrowserHistory::new(); - let history_clone = history.clone(); - let show_modal = use_state(|| false); - let show_clonedal = show_modal.clone(); - let show_clonedal2 = show_modal.clone(); - let on_modal_open = Callback::from(move |_: MouseEvent| show_clonedal.set(true)); - let container_height = use_state(|| "221px".to_string()); - - // This will track if we're showing the context menu from a long press - let show_context_menu = use_state(|| false); - let context_menu_position = use_state(|| (0, 0)); - - // Long press handler - simulate clicking the context button - let context_button_ref = use_node_ref(); - let on_long_press = { - let context_button_ref = context_button_ref.clone(); - let show_context_menu = show_context_menu.clone(); - let context_menu_position = context_menu_position.clone(); - - Callback::from(move |event: TouchEvent| { - if let Some(touch) = event.touches().get(0) { - // Record position for the context menu - context_menu_position.set((touch.client_x(), touch.client_y())); - - // Find and click the context button (if it exists) - if let Some(button) = context_button_ref.cast::() { - button.click(); - } else { - 
// If the button doesn't exist (maybe on mobile where it's hidden) - // we'll just set our state to show the menu - show_context_menu.set(true); - } - } - }) - }; - - // Setup long press detection - let (on_touch_start, on_touch_end, on_touch_move, is_long_press_state, is_pressing_state) = - use_long_press(on_long_press, Some(600)); // 600ms for long press - - let is_long_press = is_long_press_state; - let is_pressing = is_pressing_state; - - // When long press is detected through the hook, update our state - { - let show_context_menu = show_context_menu.clone(); - use_effect_with(is_long_press, move |is_pressed| { - if *is_pressed { - show_context_menu.set(true); - } - || () - }); - } - - let on_modal_close = Callback::from(move |_: MouseEvent| show_clonedal2.set(false)); - - let desc_expanded = desc_state.expanded_descriptions.contains(id_string); - - let toggle_expanded = { - let desc_dispatch = desc_dispatch.clone(); - let episode_guid = props.episode.episodeid.clone().to_string(); - - Callback::from(move |_: MouseEvent| { - let guid = episode_guid.clone(); - desc_dispatch.reduce_mut(move |state| { - if state.expanded_descriptions.contains(&guid) { - state.expanded_descriptions.remove(&guid); - toggleDescription(&guid, false); - } else { - state.expanded_descriptions.insert(guid.clone()); - toggleDescription(&guid, true); - } - }); - }) - }; - - let is_current_episode = audio_state - .currently_playing - .as_ref() - .map_or(false, |current| { - current.episode_id == props.episode.episodeid - }); - - let is_playing = audio_state.audio_playing.unwrap_or(false); - - { - let container_height = container_height.clone(); - use_effect_with((), move |_| { - let update_height = { - let container_height = container_height.clone(); - Callback::from(move |_| { - if let Some(window) = window() { - if let Ok(width) = window.inner_width() { - if let Some(width) = width.as_f64() { - let new_height = if width <= 530.0 { - "122px" - } else if width <= 768.0 { - "150px" - } else 
{ - "221px" - }; - container_height.set(new_height.to_string()); - } - } - } - }) - }; - - // Set initial height - update_height.emit(()); - - // Add resize listener - let listener = EventListener::new(&window().unwrap(), "resize", move |_| { - update_height.emit(()); - }); - - move || drop(listener) - }); - } - - let date_format = match_date_format(state.date_format.as_deref()); - let datetime = parse_date(&props.episode.episodepubdate, &state.user_tz); - let formatted_date = format!( - "{}", - format_datetime(&datetime, &state.hour_preference, date_format) - ); - - let on_play_pause = on_play_pause( - props.episode.episodeurl.clone(), - props.episode.episodetitle.clone(), - props.episode.episodedescription.clone(), - formatted_date.clone(), - props.episode.episodeartwork.clone(), - props.episode.episodeduration.clone(), - props.episode.episodeid.clone(), - props.episode.listenduration.clone(), - api_key.unwrap().unwrap(), - user_id.unwrap(), - server_name.unwrap(), - audio_dispatch.clone(), - audio_state.clone(), - None, - Some(props.episode.is_youtube.clone()), - ); - - let on_shownotes_click = { - on_shownotes_click( - history_clone.clone(), - dispatch.clone(), - Some(props.episode.episodeid.clone()), - Some(props.page_type.clone()), - Some(props.page_type.clone()), - Some(props.page_type.clone()), - true, - None, - Some(props.episode.is_youtube.clone()), - ) - }; - - let is_completed = state - .completed_episodes - .as_ref() - .unwrap_or(&vec![]) - .contains(&props.episode.episodeid); - - // Close context menu callback - let close_context_menu = { - let show_context_menu = show_context_menu.clone(); - Callback::from(move |_| { - show_context_menu.set(false); - }) - }; - - let item = virtual_episode_item( - Box::new(props.episode.clone()), - sanitize_html_with_blank_target(&props.episode.episodedescription), - desc_expanded, - &formatted_date, - on_play_pause, - on_shownotes_click, - toggle_expanded, - props.episode.episodeduration, - 
props.episode.listenduration, - &props.page_type, - Callback::from(|_| {}), - false, - props.episode.episodeurl.clone(), - is_completed, - *show_modal, - on_modal_open.clone(), - on_modal_close.clone(), - (*container_height).clone(), - is_current_episode, - is_playing, - // Add new params for touch events - on_touch_start, - on_touch_end, - on_touch_move, - *show_context_menu, - *context_menu_position, - close_context_menu, - context_button_ref, - is_pressing, - ); - - item -} diff --git a/web/src/components/gen_components.rs b/web/src/components/gen_components.rs index ced0593c..2b7f0906 100644 --- a/web/src/components/gen_components.rs +++ b/web/src/components/gen_components.rs @@ -1,36 +1,30 @@ use crate::components::context::{AppState, UIState}; #[cfg(not(feature = "server_build"))] -use crate::components::downloads_tauri::{ +use crate::pages::downloads_tauri::{ download_file, remove_episode_from_local_db, update_local_database, update_podcast_database, }; +use crate::requests::episode::Episode; + use crate::components::gen_funcs::format_error_message; -use crate::components::gen_funcs::{format_time, strip_images_from_html}; +use crate::components::gen_funcs::format_time; use crate::components::notification_center::{NotificationCenter, ToastNotification}; use crate::components::safehtml::SafeHtml; -use crate::requests::people_req::PersonEpisode; use crate::requests::pod_req::{ call_download_episode, call_mark_episode_completed, call_mark_episode_uncompleted, call_queue_episode, call_remove_downloaded_episode, call_remove_queued_episode, - call_remove_saved_episode, call_save_episode, DownloadEpisodeRequest, Episode, EpisodeDownload, - HistoryEpisode, HomeEpisode, MarkEpisodeCompletedRequest, QueuePodcastRequest, QueuedEpisode, - SavePodcastRequest, SavedEpisode, + call_remove_saved_episode, call_save_episode, DownloadEpisodeRequest, + MarkEpisodeCompletedRequest, QueuePodcastRequest, SavePodcastRequest, }; #[cfg(not(feature = "server_build"))] use 
crate::requests::pod_req::{ call_get_episode_metadata, call_get_podcast_details, EpisodeRequest, }; -use crate::requests::search_pods::Episode as SearchNewEpisode; -use crate::requests::search_pods::SearchEpisode; use crate::requests::search_pods::{ - call_get_podcast_info, call_youtube_search, test_connection, PeopleEpisode, - YouTubeSearchResults, + call_get_podcast_info, call_youtube_search, test_connection, YouTubeSearchResults, }; use gloo_events::EventListener; use gloo_timers::callback::Timeout; -use std::any::Any; -use std::rc::Rc; use wasm_bindgen::closure::Closure; -use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use web_sys::HtmlElement; use web_sys::{window, Element, HtmlInputElement, MouseEvent}; @@ -221,8 +215,6 @@ pub fn search_bar() -> Html { return; } is_submitting.set(true); - - let submit_state = state.clone(); let api_url = state.server_details.as_ref().map(|ud| ud.api_url.clone()); let history = history.clone(); let search_value = podcast_value.clone(); @@ -232,85 +224,47 @@ pub fn search_bar() -> Html { wasm_bindgen_futures::spawn_local(async move { dispatch.reduce_mut(|state| state.is_loading = Some(true)); + if *search_index == "youtube" { + match call_youtube_search(&search_value, &api_url.unwrap()).await { + Ok(yt_results) => { + let search_results = YouTubeSearchResults { + channels: yt_results.results, + videos: Vec::new(), + }; + + dispatch.reduce_mut(|state| { + state.youtube_search_results = Some(search_results); + state.is_loading = Some(false); + }); - match test_connection(&api_url.clone().unwrap()).await { - Ok(_) => { - if *search_index == "youtube" { - let server_name = submit_state - .auth_details - .as_ref() - .map(|ud| ud.server_name.clone()) - .unwrap(); - let api_key = submit_state - .auth_details - .as_ref() - .map(|ud| ud.api_key.clone()) - .unwrap() - .unwrap(); - let user_id = submit_state - .user_details - .as_ref() - .map(|ud| ud.UserID.clone()) - .unwrap(); - - match call_youtube_search( - &search_value, - 
&api_url.unwrap(), - ) - .await - { - Ok(yt_results) => { - let search_results = YouTubeSearchResults { - channels: yt_results.results, - videos: Vec::new(), - }; - - dispatch.reduce_mut(|state| { - state.youtube_search_results = Some(search_results); - state.is_loading = Some(false); - }); - - history.push("/youtube_layout"); - } - Err(e) => { - let formatted_error = format_error_message(&e.to_string()); - dispatch.reduce_mut(|state| { - state.error_message = Some(format!( - "YouTube search error: {}", - formatted_error - )); - state.is_loading = Some(false); - }); - } - } - } else { - match call_get_podcast_info( - &search_value, - &api_url.unwrap(), - &search_index, - ) - .await - { - Ok(search_results) => { - dispatch.reduce_mut(move |state| { - state.search_results = Some(search_results); - state.podcast_added = Some(false); - }); - dispatch.reduce_mut(|state| state.is_loading = Some(false)); - history.push("/pod_layout"); - } - Err(_) => { - dispatch.reduce_mut(|state| state.is_loading = Some(false)); - } - } + history.push("/youtube_layout"); + } + Err(e) => { + let formatted_error = format_error_message(&e.to_string()); + dispatch.reduce_mut(|state| { + state.error_message = + Some(format!("YouTube search error: {}", formatted_error)); + state.is_loading = Some(false); + }); } } - Err(e) => { - web_sys::console::log_1(&format!("Error testing connection: {}", e).into()); - dispatch.reduce_mut(|state| state.is_loading = Some(false)); + } else { + match call_get_podcast_info(&search_value, &api_url.unwrap(), &search_index) + .await + { + Ok(search_results) => { + dispatch.reduce_mut(move |state| { + state.search_results = Some(search_results); + state.podcast_added = Some(false); + }); + dispatch.reduce_mut(|state| state.is_loading = Some(false)); + history.push("/pod_layout"); + } + Err(_) => { + dispatch.reduce_mut(|state| state.is_loading = Some(false)); + } } } - // Reset submission state after completion is_submitting_clone.set(false); }); @@ -718,2636 
+672,290 @@ pub fn first_admin_modal(props: &FirstAdminModalProps) -> Html { } } -#[derive(Properties, Clone)] -pub struct ContextButtonProps { - pub episode: Box, - pub page_type: String, - #[prop_or(false)] - pub show_menu_only: bool, - #[prop_or(None)] - pub position: Option<(i32, i32)>, - #[prop_or(None)] - pub on_close: Option>, +pub fn empty_message(header: &str, paragraph: &str) -> Html { + html! { +
+ +

{ header }

+

{ paragraph }

+
+ } } -#[function_component(ContextButton)] -pub fn context_button(props: &ContextButtonProps) -> Html { - let dropdown_open = use_state(|| false); - let (post_state, post_dispatch) = use_store::(); - let (_ui_state, _ui_dispatch) = use_store::(); - let api_key = post_state - .auth_details - .as_ref() - .map(|ud| ud.api_key.clone()); - let user_id = post_state.user_details.as_ref().map(|ud| ud.UserID.clone()); - let server_name = post_state - .auth_details - .as_ref() - .map(|ud| ud.server_name.clone()); - let dropdown_ref = use_node_ref(); - let button_ref = use_node_ref(); - - // Update dropdown_open if show_menu_only prop changes - { - let dropdown_open = dropdown_open.clone(); - use_effect_with(props.show_menu_only, move |show_menu_only| { - if *show_menu_only { - dropdown_open.set(true); +pub fn on_shownotes_click( + history: BrowserHistory, + dispatch: Dispatch, + episode_id: i32, + shownotes_episode_url: String, + episode_audio_url: String, + podcast_title: String, + _db_added: bool, + person_episode: bool, + is_youtube: bool, +) -> Callback { + Callback::from(move |_: MouseEvent| { + web_sys::console::log_1( + &format!("Executing shownotes click. 
is_youtube: {:?}", is_youtube).into(), + ); + + let show_notes = shownotes_episode_url.clone(); + let ep_aud = episode_audio_url.clone(); + let pod_title = podcast_title.clone(); + + let dispatch_clone = dispatch.clone(); + let history_clone = history.clone(); + + wasm_bindgen_futures::spawn_local(async move { + if episode_id != 0 { + history_clone.push(format!("/episode?episode_id={}", episode_id)); + } else { + let mut new_url = "/episode".to_string(); + new_url.push_str("?podcast_title="); + new_url.push_str(&urlencoding::encode(&pod_title)); + new_url.push_str("&episode_url="); + new_url.push_str(&urlencoding::encode(&show_notes)); + new_url.push_str("&audio_url="); + new_url.push_str(&urlencoding::encode(&ep_aud)); + new_url.push_str("&is_youtube="); + new_url.push_str(&is_youtube.to_string()); + + history_clone.push(new_url); + + dispatch_clone.reduce_mut(move |state| { + state.selected_episode_id = Some(episode_id); + state.selected_episode_url = Some(show_notes); + state.selected_episode_audio_url = Some(ep_aud); + state.selected_podcast_title = Some(pod_title); + state.person_episode = Some(person_episode); + state.selected_is_youtube = is_youtube; + state.fetched_episode = None; + }); } - || () }); - } + }) +} - let toggle_dropdown = { - let dropdown_open = dropdown_open.clone(); - Callback::from(move |e: MouseEvent| { - e.stop_propagation(); - dropdown_open.set(!*dropdown_open); - }) - }; +// First the modal component +#[derive(Properties, PartialEq)] +pub struct EpisodeModalProps { + pub episode_id: i32, // Instead of Box + pub episode_url: String, + pub episode_artwork: String, + pub episode_title: String, + pub description: String, + pub format_release: String, + pub duration: i32, + pub on_close: Callback, + pub on_show_notes: Callback, + pub listen_duration_percentage: i32, + pub is_youtube: bool, +} - // Close dropdown when clicking outside - { - let dropdown_open = dropdown_open.clone(); - let dropdown_ref = dropdown_ref.clone(); - let button_ref 
= button_ref.clone(); +#[function_component(EpisodeModal)] +pub fn episode_modal(props: &EpisodeModalProps) -> Html { + let onclick_outside = { let on_close = props.on_close.clone(); - let show_menu_only = props.show_menu_only; - - use_effect_with((*dropdown_open, ()), move |_| { - let document = window().unwrap().document().unwrap(); - let dropdown_open = dropdown_open.clone(); - let dropdown_ref = dropdown_ref.clone(); - let button_ref = button_ref.clone(); - let on_close = on_close.clone(); - let show_menu_only = show_menu_only; - - // Handle outside clicks/touches to dismiss menu - let handle_outside_interaction = { - let dropdown_open = dropdown_open.clone(); - let dropdown_ref = dropdown_ref.clone(); - let button_ref = button_ref.clone(); - let on_close = on_close.clone(); - - move |event: &web_sys::Event| { - if *dropdown_open { - if let Ok(target) = event.target().unwrap().dyn_into::() { - if let Some(dropdown_element) = dropdown_ref.cast::() { - // Check if click is outside dropdown - let outside_dropdown = !dropdown_element.contains(Some(&target)); - - // Check if click is outside button (only if button exists) - let outside_button = if let Some(button_element) = button_ref.cast::() { - !button_element.contains(Some(&target)) - } else { - // If no button exists (show_menu_only case), consider it as outside - true - }; - - if outside_dropdown && outside_button { - dropdown_open.set(false); - // If this is a long press menu (show_menu_only is true), - // call the on_close callback when clicked outside - if show_menu_only { - if let Some(on_close) = &on_close { - on_close.emit(()); - } - } - } - } - } - } + Callback::from(move |e: MouseEvent| { + if let Some(target) = e.target_dyn_into::() { + if target.class_list().contains("modal-overlay") { + on_close.emit(e); } - }; + } + }) + }; + let formatted_duration = format_time(props.duration.into()); - // Add click listener for desktop - let click_handler = handle_outside_interaction.clone(); - let click_listener 
= EventListener::new(&document, "click", move |event| { - click_handler(event); - }); + html! { +