diff --git a/.github/workflows/backend-docker-publish.yml b/.github/workflows/backend-docker-publish.yml
new file mode 100644
index 00000000..88c07be0
--- /dev/null
+++ b/.github/workflows/backend-docker-publish.yml
@@ -0,0 +1,87 @@
+name: Publish Backend Multi-Architecture Image to DockerHub
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - 'Backend/**'
+ workflow_dispatch:
+env:
+ REGISTRY: docker.io
+ IMAGE_NAME: madeofpendletonwool/pinepods_backend
+jobs:
+ build-and-push-x86:
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: write
+ id-token: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+ - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_KEY }}
+ - name: Build and push x86 image
+ run: |
+ cd Backend
+ docker build --platform linux/amd64 -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 -f dockerfile .
+ docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64
+
+ build-and-push-arm64:
+ runs-on: ubuntu-24.04-arm
+ permissions:
+ contents: read
+ packages: write
+ id-token: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+ - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_KEY }}
+ - name: Build and push ARM64 image
+ run: |
+ cd Backend
+ docker build --platform linux/arm64 -t ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64 -f dockerfile .
+ docker push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64
+
+ create-manifests:
+ needs: [build-and-push-x86, build-and-push-arm64]
+ runs-on: ubuntu-latest
+ permissions:
+ contents: read
+ packages: write
+ id-token: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+ - name: Log in to Docker Hub
+        uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_KEY }}
+
+ - name: Create and push Docker manifest for the latest tag
+ run: |
+ sleep 10
+ # Pull the images first to ensure they're available
+ docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64
+ docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64
+
+ # Create and push manifest
+ docker manifest create ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest \
+ --amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-amd64 \
+ --amend ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest-arm64
+
+ docker manifest push ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest
diff --git a/.github/workflows/backwards-compatibility.yml b/.github/workflows/backwards-compatibility.yml
new file mode 100644
index 00000000..63755223
--- /dev/null
+++ b/.github/workflows/backwards-compatibility.yml
@@ -0,0 +1,406 @@
+name: Database Backwards Compatibility Test
+
+on:
+ push:
+ branches: [main]
+ pull_request:
+ branches: [main]
+
+env:
+ TEST_DB_PASSWORD: "test_password_123!"
+ TEST_DB_NAME: "pinepods_test_db"
+
+jobs:
+ test-mysql-compatibility:
+ runs-on: ubuntu-latest
+ services:
+ mysql:
+ image: mysql:latest
+ env:
+          MYSQL_ROOT_PASSWORD: "test_password_123!"
+ MYSQL_DATABASE: pinepods_test_db
+ ports:
+ - 3306:3306
+ options: >-
+ --health-cmd="mysqladmin ping"
+ --health-interval=10s
+ --health-timeout=5s
+ --health-retries=3
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ ports:
+ - 6379:6379
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Get previous release tag
+ id: get_previous_tag
+ run: |
+ # Get the latest stable release (exclude rc, alpha, beta)
+ PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)
+
+ if [ -z "$PREVIOUS_TAG" ]; then
+ echo "No stable release tag found, using 0.7.9 as baseline"
+ PREVIOUS_TAG="0.7.9"
+ fi
+
+ echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
+ echo "Using previous tag: $PREVIOUS_TAG"
+
+ - name: Start previous PinePods version
+ run: |
+ echo "🚀 Starting PinePods ${{ steps.get_previous_tag.outputs.previous_tag }}"
+
+ # Create docker-compose for previous version
+ cat > docker-compose.previous.yml << EOF
+ version: '3.8'
+ services:
+ pinepods_previous:
+ image: madeofpendletonwool/pinepods:${{ steps.get_previous_tag.outputs.previous_tag }}
+ environment:
+ DB_TYPE: mysql
+ DB_HOST: mysql
+ DB_PORT: 3306
+ DB_USER: root
+ DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ DB_NAME: ${{ env.TEST_DB_NAME }}
+ VALKEY_HOST: valkey
+ VALKEY_PORT: 6379
+ HOSTNAME: 'http://localhost:8040'
+                DEBUG_MODE: 'true'
+ SEARCH_API_URL: 'https://search.pinepods.online/api/search'
+ PEOPLE_API_URL: 'https://people.pinepods.online'
+ ports:
+ - "8040:8040"
+ depends_on:
+ - mysql
+ - valkey
+ networks:
+ - test_network
+
+ mysql:
+ image: mysql:8.0
+ environment:
+ MYSQL_ROOT_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ MYSQL_DATABASE: ${{ env.TEST_DB_NAME }}
+ networks:
+ - test_network
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ networks:
+ - test_network
+
+ networks:
+ test_network:
+ driver: bridge
+ EOF
+
+ # Start previous version and wait for it to be ready
+ docker compose -f docker-compose.previous.yml up -d
+
+ # Wait for services to be ready
+ echo "⏳ Waiting for previous version to initialize..."
+ sleep 30
+
+ # Check if previous version is responding
+ timeout 60 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do sleep 5; done'
+ echo "✅ Previous version (${{ steps.get_previous_tag.outputs.previous_tag }}) is ready"
+
+ - name: Stop previous version
+ run: |
+ echo "🛑 Stopping previous PinePods version"
+ docker compose -f docker-compose.previous.yml stop pinepods_previous
+ echo "✅ Previous version stopped (database preserved)"
+
+
+ - name: Build current version
+ run: |
+ echo "🔨 Building current PinePods version from source"
+ docker build -f dockerfile -t pinepods-current:test .
+ echo "✅ Build complete"
+
+ - name: Start current version
+ run: |
+
+ # Create docker-compose for current version
+ cat > docker-compose.current.yml << EOF
+ version: '3.8'
+ services:
+ pinepods_current:
+ image: pinepods-current:test
+ environment:
+ DB_TYPE: mysql
+ DB_HOST: mysql
+ DB_PORT: 3306
+ DB_USER: root
+ DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ DB_NAME: ${{ env.TEST_DB_NAME }}
+ VALKEY_HOST: valkey
+ VALKEY_PORT: 6379
+ HOSTNAME: 'http://localhost:8040'
+                DEBUG_MODE: 'true'
+ SEARCH_API_URL: 'https://search.pinepods.online/api/search'
+ PEOPLE_API_URL: 'https://people.pinepods.online'
+ ports:
+ - "8040:8040"
+ depends_on:
+ - mysql
+ - valkey
+ networks:
+ - test_network
+
+ mysql:
+ image: mysql:8.0
+ environment:
+ MYSQL_ROOT_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ MYSQL_DATABASE: ${{ env.TEST_DB_NAME }}
+ networks:
+ - test_network
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ networks:
+ - test_network
+
+ networks:
+ test_network:
+ driver: bridge
+ EOF
+
+ echo "🚀 Starting current PinePods version"
+ # Start current version
+ docker compose -f docker-compose.current.yml up -d pinepods_current
+
+ # Wait for current version to be ready
+ echo "⏳ Waiting for current version to initialize..."
+ sleep 60
+
+ # Check if current version is responding
+ timeout 120 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do echo "Waiting for current version..."; sleep 10; done'
+ echo "✅ Current version is ready"
+
+ - name: Build validator and validate upgraded database
+ run: |
+ echo "🔨 Building database validator"
+ docker build -f Dockerfile.validator -t pinepods-validator .
+
+ echo "🔍 Validating upgraded database schema"
+ docker run --rm --network pinepods_test_network \
+ -e DB_TYPE=mysql \
+ -e DB_HOST=mysql \
+ -e DB_PORT=3306 \
+ -e DB_USER=root \
+ -e DB_PASSWORD=${{ env.TEST_DB_PASSWORD }} \
+ -e DB_NAME=${{ env.TEST_DB_NAME }} \
+ pinepods-validator
+
+ - name: Test basic functionality
+ run: |
+ echo "🧪 Testing basic API functionality"
+
+ # Test health endpoint
+ curl -f http://localhost:8040/api/health || exit 1
+
+ # Test pinepods check endpoint
+ curl -f http://localhost:8040/api/pinepods_check || exit 1
+
+ echo "✅ Basic functionality tests passed"
+
+ - name: Cleanup
+ if: always()
+ run: |
+ echo "🧹 Cleaning up test environment"
+ docker compose -f docker-compose.previous.yml down -v || true
+ docker compose -f docker-compose.current.yml down -v || true
+
+ test-postgresql-compatibility:
+ runs-on: ubuntu-latest
+ services:
+ postgres:
+ image: postgres:15
+ env:
+          POSTGRES_PASSWORD: "test_password_123!"
+ POSTGRES_DB: pinepods_test_db
+ ports:
+ - 5432:5432
+ options: >-
+ --health-cmd pg_isready
+ --health-interval 10s
+ --health-timeout 5s
+ --health-retries 5
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ ports:
+ - 6379:6379
+
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+
+ - name: Get previous release tag
+ id: get_previous_tag
+ run: |
+ # Get the latest stable release (exclude rc, alpha, beta)
+ PREVIOUS_TAG=$(git tag --sort=-version:refname | grep -E '^[0-9]+\.[0-9]+\.[0-9]+$' | head -n 1)
+
+ if [ -z "$PREVIOUS_TAG" ]; then
+ echo "No stable release tag found, using 0.7.9 as baseline"
+ PREVIOUS_TAG="0.7.9"
+ fi
+
+ echo "previous_tag=$PREVIOUS_TAG" >> $GITHUB_OUTPUT
+ echo "Using previous tag: $PREVIOUS_TAG"
+
+ - name: Start previous PinePods version
+ run: |
+ echo "🚀 Starting PinePods ${{ steps.get_previous_tag.outputs.previous_tag }} (PostgreSQL)"
+
+ cat > docker-compose.postgres-previous.yml << EOF
+ version: '3.8'
+ services:
+ pinepods_previous:
+ image: madeofpendletonwool/pinepods:${{ steps.get_previous_tag.outputs.previous_tag }}
+ environment:
+ DB_TYPE: postgresql
+ DB_HOST: postgres
+ DB_PORT: 5432
+ DB_USER: postgres
+ DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ DB_NAME: ${{ env.TEST_DB_NAME }}
+ VALKEY_HOST: valkey
+ VALKEY_PORT: 6379
+ HOSTNAME: 'http://localhost:8040'
+                DEBUG_MODE: 'true'
+ SEARCH_API_URL: 'https://search.pinepods.online/api/search'
+ PEOPLE_API_URL: 'https://people.pinepods.online'
+ ports:
+ - "8040:8040"
+ depends_on:
+ - postgres
+ - valkey
+ networks:
+ - test_network
+
+ postgres:
+ image: postgres:latest
+ environment:
+ POSTGRES_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ POSTGRES_DB: ${{ env.TEST_DB_NAME }}
+ networks:
+ - test_network
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ networks:
+ - test_network
+
+ networks:
+ test_network:
+ driver: bridge
+ EOF
+
+ docker compose -f docker-compose.postgres-previous.yml up -d
+ sleep 30
+ timeout 60 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do sleep 5; done'
+
+ - name: Stop previous version
+ run: |
+ echo "🛑 Stopping previous PinePods version"
+ docker compose -f docker-compose.postgres-previous.yml stop pinepods_previous
+ echo "✅ Previous version stopped (database preserved)"
+
+ - name: Build current version (PostgreSQL)
+ run: |
+ echo "🔨 Building current PinePods version from source"
+ docker build -f dockerfile -t pinepods-current:test .
+ echo "✅ Build complete"
+
+ - name: Test current version (PostgreSQL)
+ run: |
+ echo "🚀 Starting current PinePods version with PostgreSQL"
+
+ # Create docker-compose for current version
+ cat > docker-compose.postgres-current.yml << EOF
+ version: '3.8'
+ services:
+ pinepods_current:
+ image: pinepods-current:test
+ environment:
+ DB_TYPE: postgresql
+ DB_HOST: postgres
+ DB_PORT: 5432
+ DB_USER: postgres
+ DB_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ DB_NAME: ${{ env.TEST_DB_NAME }}
+ VALKEY_HOST: valkey
+ VALKEY_PORT: 6379
+ HOSTNAME: 'http://localhost:8040'
+                DEBUG_MODE: 'true'
+ SEARCH_API_URL: 'https://search.pinepods.online/api/search'
+ PEOPLE_API_URL: 'https://people.pinepods.online'
+ ports:
+ - "8040:8040"
+ depends_on:
+ - postgres
+ - valkey
+ networks:
+ - test_network
+
+ postgres:
+ image: postgres:latest
+ environment:
+ POSTGRES_PASSWORD: ${{ env.TEST_DB_PASSWORD }}
+ POSTGRES_DB: ${{ env.TEST_DB_NAME }}
+ networks:
+ - test_network
+
+ valkey:
+ image: valkey/valkey:8-alpine
+ networks:
+ - test_network
+
+ networks:
+ test_network:
+ driver: bridge
+ EOF
+
+ # Start current version
+ docker compose -f docker-compose.postgres-current.yml up -d pinepods_current
+
+ # Wait for current version to be ready
+ echo "⏳ Waiting for current version to initialize..."
+ sleep 60
+
+ # Check if current version is responding
+ timeout 120 bash -c 'while ! curl -f http://localhost:8040/api/pinepods_check; do echo "Waiting for current version..."; sleep 10; done'
+ echo "✅ Current version is ready"
+
+ - name: Build validator and validate upgraded database (PostgreSQL)
+ run: |
+ echo "🔨 Building PostgreSQL database validator"
+ docker build -f Dockerfile.validator.postgres -t pinepods-validator-postgres .
+
+ echo "🔍 Validating upgraded database schema"
+ docker run --rm --network pinepods_test_network \
+ -e DB_TYPE=postgresql \
+ -e DB_HOST=postgres \
+ -e DB_PORT=5432 \
+ -e DB_USER=postgres \
+ -e DB_PASSWORD=${{ env.TEST_DB_PASSWORD }} \
+ -e DB_NAME=${{ env.TEST_DB_NAME }} \
+ pinepods-validator-postgres
+
+ - name: Cleanup
+ if: always()
+ run: |
+ docker compose -f docker-compose.postgres-previous.yml down -v || true
+ docker compose -f docker-compose.postgres-current.yml down -v || true
diff --git a/.github/workflows/build-android-app.yml b/.github/workflows/build-android-app.yml
deleted file mode 100644
index 57734a52..00000000
--- a/.github/workflows/build-android-app.yml
+++ /dev/null
@@ -1,155 +0,0 @@
-name: Build Android Pinepods App (Legacy Tauri - Deprecated)
-
-on:
- # This workflow is deprecated in favor of build-android-flutter.yml
- # release:
- # types: [published]
- workflow_dispatch:
- inputs:
- version:
- description: "Manual override version tag (optional)"
- required: false
-
-jobs:
- build:
- name: Build Android Release
- runs-on: ubuntu-latest
-
- steps:
- - name: Set Image Tag (Unix)
- run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV
-
- - name: Setup | Checkout
- uses: actions/checkout@v3
-
- - name: Set up JDK 17
- uses: actions/setup-java@v3
- with:
- java-version: "17"
- distribution: "temurin"
-
- - name: Setup Android SDK
- uses: android-actions/setup-android@v3
-
- - uses: nttld/setup-ndk@v1
- id: setup-ndk
- with:
- ndk-version: r27b
- add-to-path: false
-
- - uses: hecrj/setup-rust-action@v2
- with:
- rust-version: 1.86
- targets: wasm32-unknown-unknown
-
- - name: Install cargo-binstall
- uses: cargo-bins/cargo-binstall@main
-
- - name: Depends install
- if: ${{ env.DEPENDS_SETUP == 'true' }}
- run: |
- sudo apt update
- sudo apt install -qy libgtk-3-dev
- sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
-
- - name: wasm-addition
- run: |
- rustup target add wasm32-unknown-unknown
- rustup target add aarch64-linux-android
-
- - name: Install Trunk
- run: |
- cargo binstall trunk -y
-
- - name: Install Tauri
- run: |
- cargo install tauri-cli@2.0.0-rc.16 --locked
-
- - name: Update Tauri version (UNIX)
- run: |
- cd web/src-tauri
- # Use different sed syntax for macOS
- if [[ "$OSTYPE" == "darwin"* ]]; then
- sed -i '' "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json
- else
- sed -i "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json
- fi
- cat tauri.conf.json
- shell: bash
-
- - name: setup Android signing
- run: |
- echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" > web/src-tauri/gen/android/keystore.properties
- echo "password=${{ secrets.ANDROID_KEY_PW }}" >> web/src-tauri/gen/android/keystore.properties
- base64 -d <<< "${{ secrets.ANDROID_KEY_BASE64 }}" > $RUNNER_TEMP/keystore.jks
- echo "storeFile=$RUNNER_TEMP/keystore.jks" >> web/src-tauri/gen/android/keystore.properties
-
- - name: Setup Android SDK
- uses: android-actions/setup-android@v3
-
- - name: setup Android signing
- run: |
- cd web/src-tauri/gen/android
- echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" > keystore.properties
- echo "password=${{ secrets.ANDROID_KEY_PW }}" >> keystore.properties
- base64 -d <<< "${{ secrets.ANDROID_KEY_BASE64 }}" > $RUNNER_TEMP/keystore.jks
- echo "storeFile=$RUNNER_TEMP/keystore.jks" >> keystore.properties
-
- - name: Build | Compile (UNIX)
- run: |
- export ANDROID_HOME=$HOME/.android/sdk
- export NDK_HOME=$NDK_JOB_HOME
- export RUSTFLAGS="--cfg=web_sys_unstable_apis" # Add this line
- cd web/src-tauri
- cargo tauri icon icons/Square512x512.png
- cat tauri.conf.json
- cargo tauri android init
- cargo tauri android build --apk
- cargo tauri android build --aab
- shell: bash
- env:
- NDK_JOB_HOME: ${{ steps.setup-ndk.outputs.ndk-path }}
-
- - name: Archive build 1 (apk)
- uses: actions/upload-artifact@v4
- with:
- name: apk-build
- path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk
-
- - name: Archive build 2 (aab)
- uses: actions/upload-artifact@v4
- with:
- name: aab-build
- path: ./web/src-tauri/gen/android/app/build/outputs/bundle/universalRelease/app-universal-release.aab
-
- # - name: Archive build 2 (aab)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Archive build 2 (Ubuntu)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/target/release/bundle/appimage/${{ env.ARTIFACT_NAME2 }}
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Archive build 3 (Ubuntu)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/target/release/bundle/rpm/${{ env.ARTIFACT_NAME3 }}
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Upload release asset (Ubuntu - DEB)
- # if: github.event_name == 'release' && matrix.os == 'ubuntu-latest'
- # uses: actions/upload-release-asset@v1
- # env:
- # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- # with:
- # upload_url: ${{ github.event.release.upload_url }}
- # asset_path: ./web/src-tauri/target/release/bundle/deb/${{ env.ARTIFACT_NAME1 }}
- # asset_name: ${{ env.ARTIFACT_NAME1 }}
- # asset_content_type: application/vnd.debian.binary-package
diff --git a/.github/workflows/build-android-archive.yml b/.github/workflows/build-android-archive.yml
deleted file mode 100644
index 7e5815fb..00000000
--- a/.github/workflows/build-android-archive.yml
+++ /dev/null
@@ -1,172 +0,0 @@
-permissions:
- contents: read
-name: Build Android Release Archive
-
-on:
- workflow_dispatch:
- inputs:
- version:
- description: "Version tag (e.g., v0.7.9)"
- required: false
- default: "manual-build"
-
-jobs:
- build-android:
- name: Build Android Release Archive
- runs-on: ubuntu-latest
-
- steps:
- - name: Set Build Info
- run: |
- echo "VERSION_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
- echo "BUILD_NUMBER=$(date +%s)" >> $GITHUB_ENV
- echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_ENV
-
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Set up JDK 17
- uses: actions/setup-java@v4
- with:
- java-version: "17"
- distribution: "temurin"
-
- - name: Setup Android SDK
- uses: android-actions/setup-android@v3
-
- - uses: nttld/setup-ndk@v1
- id: setup-ndk
- with:
- ndk-version: r26d
- link-to-sdk: true
-
- - name: Setup Flutter
- uses: subosito/flutter-action@v2
- with:
- flutter-version: "3.32.0"
- channel: "stable"
-
- - name: Install dependencies
- run: |
- cd mobile
- flutter pub get
-
- - name: Setup Android signing
- run: |
- cd mobile/android
- echo "storePassword=${{ secrets.ANDROID_STORE_PASSWORD }}" > key.properties
- echo "keyPassword=${{ secrets.ANDROID_KEY_PASSWORD }}" >> key.properties
- echo "keyAlias=${{ secrets.ANDROID_KEY_ALIAS }}" >> key.properties
- echo "storeFile=../upload-keystore.jks" >> key.properties
- echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > upload-keystore.jks
-
- - name: Update app version
- run: |
- cd mobile
- if [[ "$VERSION_TAG" != "manual-build" && "$VERSION_TAG" != "" ]]; then
- # Remove 'v' prefix if present
- CLEAN_VERSION=${VERSION_TAG#v}
- sed -i "s/^version: .*/version: $CLEAN_VERSION/" pubspec.yaml
- echo "Updated version to: $CLEAN_VERSION"
- fi
-
- - name: Build signed APK (split per ABI)
- run: |
- cd mobile
- flutter build apk --release --split-per-abi
- echo "Split APK build completed"
-
- - name: Build signed APK (universal)
- run: |
- cd mobile
- flutter build apk --release
- echo "Universal APK build completed"
-
- - name: Build signed AAB (App Bundle)
- run: |
- cd mobile
- flutter build appbundle --release
- echo "AAB build completed"
-
- - name: Prepare release artifacts
- run: |
- cd mobile
- mkdir -p ../release-artifacts/android
-
- # Copy split APKs with descriptive names
- cp build/app/outputs/flutter-apk/app-arm64-v8a-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-arm64-v8a.apk
- cp build/app/outputs/flutter-apk/app-armeabi-v7a-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-armeabi-v7a.apk
- cp build/app/outputs/flutter-apk/app-x86_64-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-x86_64.apk
-
- # Copy universal APK
- cp build/app/outputs/flutter-apk/app-release.apk ../release-artifacts/android/PinePods-${VERSION_TAG}-universal.apk
-
- # Copy AAB for Play Store
- cp build/app/outputs/bundle/release/app-release.aab ../release-artifacts/android/PinePods-${VERSION_TAG}-playstore.aab
-
- # Create build info file
- cat > ../release-artifacts/android/BUILD_INFO.txt << EOF
- PinePods Mobile - Android Release Build
- =====================================
- Version: $VERSION_TAG
- Build Date: $BUILD_DATE
- Build Number: $BUILD_NUMBER
- Flutter Version: 3.32.0
- Package: com.gooseberrydevelopment.pinepods
-
- 📱 APK FILES:
- =============
- PinePods-${VERSION_TAG}-arm64-v8a.apk - 64-bit ARM (most modern phones)
- PinePods-${VERSION_TAG}-armeabi-v7a.apk - 32-bit ARM (older phones)
- PinePods-${VERSION_TAG}-x86_64.apk - 64-bit Intel (emulators/some tablets)
- PinePods-${VERSION_TAG}-universal.apk - Works on all devices (larger size)
-
- 📦 STORE FILES:
- ==============
- PinePods-${VERSION_TAG}-playstore.aab - Google Play Store upload (.aab format)
-
- 🚀 DISTRIBUTION:
- ================
- • Google Play Store: Use the .aab file
- • F-Droid: They build from source (no APK needed)
- • IzzyOnDroid: Use universal.apk or arm64-v8a.apk
- • Direct install: Use arm64-v8a.apk for most users
-
- ⚡ QUICK INSTALL:
- ================
- Most users should download: PinePods-${VERSION_TAG}-arm64-v8a.apk
- EOF
-
- - name: Upload Android Release Artifacts
- uses: actions/upload-artifact@v4
- with:
- name: android-release-${{ env.VERSION_TAG }}
- path: release-artifacts/android/
- retention-days: 90
-
- - name: Create Release Summary
- run: |
- echo "# 🤖 Android Release Build Complete" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "**Version:** ${{ github.event.inputs.version }}" >> $GITHUB_STEP_SUMMARY
- echo "**Build Date:** $(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
-
- echo "## 📦 Generated Files" >> $GITHUB_STEP_SUMMARY
- echo "- 🎯 **arm64-v8a.apk** - Recommended for most users" >> $GITHUB_STEP_SUMMARY
- echo "- 📱 **armeabi-v7a.apk** - For older Android devices" >> $GITHUB_STEP_SUMMARY
- echo "- 💻 **x86_64.apk** - For emulators and Intel devices" >> $GITHUB_STEP_SUMMARY
- echo "- 🌍 **universal.apk** - Works on all devices (larger file)" >> $GITHUB_STEP_SUMMARY
- echo "- 🏪 **playstore.aab** - For Google Play Store upload" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "## 📥 Download Instructions" >> $GITHUB_STEP_SUMMARY
- echo "1. Go to **Actions** tab → This workflow run" >> $GITHUB_STEP_SUMMARY
- echo "2. Scroll down to **Artifacts** section" >> $GITHUB_STEP_SUMMARY
- echo "3. Download \`android-release-${{ github.event.inputs.version }}\`" >> $GITHUB_STEP_SUMMARY
- echo "4. Extract the ZIP file" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "## 🚀 Next Steps" >> $GITHUB_STEP_SUMMARY
- echo "- Test the APK on your device" >> $GITHUB_STEP_SUMMARY
- echo "- Submit \`.aab\` file to Google Play Console" >> $GITHUB_STEP_SUMMARY
- echo "- Submit \`universal.apk\` to IzzyOnDroid" >> $GITHUB_STEP_SUMMARY
- echo "- Create GitHub release with APKs for direct download" >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/build-android-flutter.yml b/.github/workflows/build-android-flutter.yml
index 6a200e00..e7db7546 100644
--- a/.github/workflows/build-android-flutter.yml
+++ b/.github/workflows/build-android-flutter.yml
@@ -1,5 +1,5 @@
permissions:
- contents: read
+ contents: write
name: Build Android Flutter App
on:
@@ -25,6 +25,9 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Fetch full git history for accurate commit count
+ token: ${{ secrets.GITHUB_TOKEN }}
- name: Set up JDK 17
uses: actions/setup-java@v4
@@ -38,7 +41,7 @@ jobs:
- name: Setup Flutter
uses: subosito/flutter-action@v2
with:
- flutter-version: "3.32.0"
+ flutter-version: "3.35.2"
channel: "stable"
- name: Install dependencies
@@ -55,38 +58,51 @@ jobs:
echo "storeFile=../upload-keystore.jks" >> key.properties
echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > upload-keystore.jks
- - name: Update app version
+ - name: Verify version files
run: |
cd mobile
- # Update pubspec.yaml version to use Flutter format (version+build)
- if [[ "$IMAGE_TAG" != "latest" ]]; then
- # Remove 'v' prefix if present and create build number from date
- VERSION_NAME=${IMAGE_TAG#v}
- BUILD_NUMBER=$(date +%Y%m%d)
- sed -i "s/^version: .*/version: ${VERSION_NAME}+${BUILD_NUMBER}/" pubspec.yaml
- fi
+ echo "Current version in pubspec.yaml:"
+ grep "^version:" pubspec.yaml
+ echo "Current version in environment.dart:"
+ grep "_projectVersion\|_build" lib/core/environment.dart
+ echo "Build will use versions exactly as they are in the repository"
+
- name: Build APK
run: |
cd mobile
flutter build apk --release --split-per-abi
- # - name: Build AAB
- # run: |
- # cd mobile
- # flutter build appbundle --release
+ - name: Build AAB
+ run: |
+ cd mobile
+ flutter build appbundle --release
+
+ - name: Rename APK files
+ run: |
+ cd mobile/build/app/outputs/flutter-apk
+ # Extract version from IMAGE_TAG (remove 'v' prefix if present)
+ VERSION=${IMAGE_TAG#v}
+ if [[ "$VERSION" == "latest" ]]; then
+ VERSION="0.0.0"
+ fi
+
+ # Rename APK files with proper naming convention
+ mv app-armeabi-v7a-release.apk pinepods-armeabi-${VERSION}.apk
+ mv app-arm64-v8a-release.apk pinepods-arm64-${VERSION}.apk
+ mv app-x86_64-release.apk pinepods-x86_64-${VERSION}.apk
- name: Upload APK artifacts
uses: actions/upload-artifact@v4
with:
name: android-apk-builds
- path: mobile/build/app/outputs/flutter-apk/*.apk
+ path: mobile/build/app/outputs/flutter-apk/pinepods-*.apk
- # - name: Upload AAB artifact
- # uses: actions/upload-artifact@v4
- # with:
- # name: android-aab-build
- # path: mobile/build/app/outputs/bundle/release/app-release.aab
+ - name: Upload AAB artifact
+ uses: actions/upload-artifact@v4
+ with:
+ name: android-aab-build
+ path: mobile/build/app/outputs/bundle/release/app-release.aab
# - name: Upload to Google Play Store
# if: github.event_name == 'release'
diff --git a/.github/workflows/build-fdroid.yml b/.github/workflows/build-fdroid.yml
deleted file mode 100644
index e6287bed..00000000
--- a/.github/workflows/build-fdroid.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-name: Build F-Droid APK
-
-on:
- release:
- types: [published]
- workflow_dispatch:
- inputs:
- version:
- description: "Manual override version tag (optional)"
- required: false
-
-jobs:
- build:
- name: Build F-Droid Release
- runs-on: ubuntu-latest
- permissions:
- contents: read
-
- steps:
- - name: Set Image Tag
- run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV
-
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Set up JDK 17
- uses: actions/setup-java@v4
- with:
- java-version: "17"
- distribution: "temurin"
-
- - name: Setup Android SDK
- uses: android-actions/setup-android@v3
-
- - name: Setup Flutter
- uses: subosito/flutter-action@v2
- with:
- flutter-version: '3.32.0'
- channel: 'stable'
-
- - name: Install dependencies
- run: |
- cd mobile
- flutter pub get
-
- - name: Update app version
- run: |
- cd mobile
- # Update pubspec.yaml version
- if [[ "$IMAGE_TAG" != "latest" ]]; then
- sed -i "s/^version: .*/version: ${IMAGE_TAG#v}/" pubspec.yaml
- fi
-
- - name: Build F-Droid APK (unsigned)
- run: |
- cd mobile
- flutter build apk --release
-
- - name: Rename APK for F-Droid
- run: |
- cd mobile
- cp build/app/outputs/flutter-apk/app-release.apk build/app/outputs/flutter-apk/PinePods-fdroid-${IMAGE_TAG#v}.apk
-
- - name: Upload F-Droid APK artifact
- uses: actions/upload-artifact@v4
- with:
- name: fdroid-apk-build
- path: mobile/build/app/outputs/flutter-apk/PinePods-fdroid-*.apk
\ No newline at end of file
diff --git a/.github/workflows/build-flatpak.yml b/.github/workflows/build-flatpak.yml
index 773d16a2..744c9684 100644
--- a/.github/workflows/build-flatpak.yml
+++ b/.github/workflows/build-flatpak.yml
@@ -21,7 +21,7 @@ jobs:
if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Install Flatpak
run: |
diff --git a/.github/workflows/build-helm-chart.yml b/.github/workflows/build-helm-chart.yml
index 2b2729b3..93e61853 100644
--- a/.github/workflows/build-helm-chart.yml
+++ b/.github/workflows/build-helm-chart.yml
@@ -24,15 +24,10 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
with:
- persist-credentials: false # This prevents the default token from being persisted in the local git config
-
- - name: Setup Git for push
- run: |
- git config --global user.name "github-actions[bot]"
- git config --global user.email "github-actions[bot]@users.noreply.github.com"
- git remote set-url origin https://x-access-token:${{ secrets.PUSH_PAT }}@github.com/${{ github.repository }}.git
+ token: ${{ secrets.PUSH_PAT }}
+ persist-credentials: true
- name: Setup Helm
uses: Azure/setup-helm@v4.2.0
@@ -80,8 +75,8 @@ jobs:
- uses: EndBug/add-and-commit@v9
with:
+ github_token: ${{ secrets.PUSH_PAT }}
committer_name: GitHub Actions
committer_email: actions@github.com
message: "Update Helm chart for release ${{ github.event.release.tag_name }}"
add: "docs"
- push: "origin main"
diff --git a/.github/workflows/build-ios-archive.yml b/.github/workflows/build-ios-archive.yml
deleted file mode 100644
index be8ce4fa..00000000
--- a/.github/workflows/build-ios-archive.yml
+++ /dev/null
@@ -1,197 +0,0 @@
-name: Build iOS Release Archive
-
-permissions:
- contents: read
- secrets: read
- actions: write
-
-on:
- workflow_dispatch:
- inputs:
- version:
- description: "Version tag (e.g., v0.7.9)"
- required: false
- default: "manual-build"
-
-jobs:
- build-ios:
- name: Build iOS Release Archive
- runs-on: macOS-latest
-
- steps:
- - name: Set Build Info
- run: |
- echo "VERSION_TAG=${{ github.event.inputs.version }}" >> $GITHUB_ENV
- echo "BUILD_NUMBER=$(date +%s)" >> $GITHUB_ENV
- echo "BUILD_DATE=$(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_ENV
-
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Setup Flutter
- uses: subosito/flutter-action@v2
- with:
- flutter-version: "3.32.0"
- channel: "stable"
-
- - name: Install dependencies
- run: |
- cd mobile
- flutter pub get
- cd ios
- pod install
-
- - name: Setup iOS signing
- env:
- IOS_CERTIFICATE_BASE64: ${{ secrets.IOS_CERTIFICATE_BASE64 }}
- IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
- IOS_PROVISIONING_PROFILE_BASE64: ${{ secrets.IOS_PROVISIONING_PROFILE_BASE64 }}
- KEYCHAIN_PASSWORD: ${{ secrets.KEYCHAIN_PASSWORD }}
- run: |
- # Create keychain
- security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
- security default-keychain -s build.keychain
- security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
- security set-keychain-settings -t 3600 -l build.keychain
-
- # Import certificate
- echo "$IOS_CERTIFICATE_BASE64" | base64 -d > certificate.p12
- security import certificate.p12 -P "$IOS_CERTIFICATE_PASSWORD" -A
-
- # Install provisioning profile
- mkdir -p ~/Library/MobileDevice/Provisioning\ Profiles
- echo "$IOS_PROVISIONING_PROFILE_BASE64" | base64 -d > ~/Library/MobileDevice/Provisioning\ Profiles/build.mobileprovision
-
- - name: Update app version
- run: |
- cd mobile
- if [[ "$VERSION_TAG" != "manual-build" && "$VERSION_TAG" != "" ]]; then
- # Remove 'v' prefix if present
- CLEAN_VERSION=${VERSION_TAG#v}
- sed -i '' "s/^version: .*/version: $CLEAN_VERSION/" pubspec.yaml
- echo "Updated version to: $CLEAN_VERSION"
- fi
-
- - name: Create export options plist
- run: |
- cd mobile/ios
- cat > exportOptions.plist << EOF
-
-
-
-
- method
- app-store
- teamID
- ${{ secrets.IOS_TEAM_ID }}
- uploadBitcode
-
- uploadSymbols
-
- compileBitcode
-
-
-
- EOF
-
- - name: Build iOS app
- run: |
- cd mobile
- flutter build ios --release --no-codesign
- echo "iOS build completed"
-
- - name: Archive and sign iOS app
- run: |
- cd mobile/ios
- xcodebuild -workspace Runner.xcworkspace \
- -scheme Runner \
- -configuration Release \
- -destination generic/platform=iOS \
- -archivePath build/Runner.xcarchive \
- archive
-
- xcodebuild -exportArchive \
- -archivePath build/Runner.xcarchive \
- -exportPath build \
- -exportOptionsPlist exportOptions.plist
-
- - name: Prepare release artifacts
- run: |
- cd mobile
- mkdir -p ../release-artifacts/ios
-
- # Find and copy IPA
- find ios/build -name "*.ipa" -exec cp {} ../release-artifacts/ios/PinePods-${VERSION_TAG}.ipa \;
-
- # Create build info file
- cat > ../release-artifacts/ios/BUILD_INFO.txt << EOF
- PinePods Mobile - iOS Release Build
- ==================================
- Version: $VERSION_TAG
- Build Date: $BUILD_DATE
- Build Number: $BUILD_NUMBER
- Flutter Version: 3.32.0
- Bundle ID: com.gooseberrydevelopment.pinepods
-
- 📱 IPA FILE:
- ===========
- PinePods-${VERSION_TAG}.ipa - iOS App Store package
-
- 🚀 DISTRIBUTION:
- ===============
- • App Store: Upload IPA to App Store Connect
- • TestFlight: Upload via App Store Connect for beta testing
- • Enterprise: Use enterprise provisioning profile (separate build needed)
-
- ⚡ UPLOAD INSTRUCTIONS:
- ======================
- 1. Go to App Store Connect (appstoreconnect.apple.com)
- 2. Select your app → TestFlight or App Store tab
- 3. Click "+" to add new build
- 4. Upload the .ipa file
- 5. Wait for processing (10-30 minutes)
- 6. Submit for review when ready
- EOF
-
- - name: Upload iOS Release Artifacts
- uses: actions/upload-artifact@v4
- with:
- name: ios-release-${{ env.VERSION_TAG }}
- path: release-artifacts/ios/
- retention-days: 90
-
- - name: Create Release Summary
- run: |
- echo "# 🍎 iOS Release Build Complete" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "**Version:** ${{ github.event.inputs.version }}" >> $GITHUB_STEP_SUMMARY
- echo "**Build Date:** $(date -u +'%Y-%m-%d %H:%M:%S UTC')" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
-
- echo "## 📦 Generated Files" >> $GITHUB_STEP_SUMMARY
- echo "- 📱 **PinePods-${{ github.event.inputs.version }}.ipa** - App Store ready package" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "## 📥 Download Instructions" >> $GITHUB_STEP_SUMMARY
- echo "1. Go to **Actions** tab → This workflow run" >> $GITHUB_STEP_SUMMARY
- echo "2. Scroll down to **Artifacts** section" >> $GITHUB_STEP_SUMMARY
- echo "3. Download \`ios-release-${{ github.event.inputs.version }}\`" >> $GITHUB_STEP_SUMMARY
- echo "4. Extract the ZIP file" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "## 🚀 Next Steps" >> $GITHUB_STEP_SUMMARY
- echo "- Upload \`.ipa\` to App Store Connect" >> $GITHUB_STEP_SUMMARY
- echo "- Submit to TestFlight for beta testing" >> $GITHUB_STEP_SUMMARY
- echo "- Submit for App Store review when ready" >> $GITHUB_STEP_SUMMARY
- echo "" >> $GITHUB_STEP_SUMMARY
- echo "## 💡 Requirements" >> $GITHUB_STEP_SUMMARY
- echo "- Apple Developer Account (\$99/year)" >> $GITHUB_STEP_SUMMARY
- echo "- Valid distribution certificate and provisioning profile" >> $GITHUB_STEP_SUMMARY
- echo "- All iOS secrets configured in GitHub repository settings" >> $GITHUB_STEP_SUMMARY
-
- - name: Cleanup keychain and provisioning profile
- if: always()
- run: |
- if security list-keychains | grep -q "build.keychain"; then
- security delete-keychain build.keychain
- fi
- rm -f ~/Library/MobileDevice/Provisioning\ Profiles/build.mobileprovision
- rm -f certificate.p12
diff --git a/.github/workflows/build-snap.yml b/.github/workflows/build-snap.yml
index af8503e5..5141df1d 100644
--- a/.github/workflows/build-snap.yml
+++ b/.github/workflows/build-snap.yml
@@ -18,7 +18,7 @@ jobs:
if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'workflow_dispatch' }}
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Get version
id: get_version
diff --git a/.github/workflows/build-tauri-clients.yml b/.github/workflows/build-tauri-clients.yml
index 8de2d6ac..6d9a08e3 100644
--- a/.github/workflows/build-tauri-clients.yml
+++ b/.github/workflows/build-tauri-clients.yml
@@ -22,9 +22,7 @@ jobs:
- windows-latest
include:
- os: ubuntu-arm64
- runs-on:
- - runs-on=${{ github.run_id }}
- - runner=4cpu-linux-arm64
+ runs-on: ubuntu-24.04-arm
runs-on: ${{ matrix.runs-on || matrix.os }}
@@ -73,25 +71,16 @@ jobs:
if: ${{ matrix.os == 'windows-latest' }}
- name: Setup | Checkout
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- uses: hecrj/setup-rust-action@v2
with:
- rust-version: 1.86
+ rust-version: 1.89
targets: wasm32-unknown-unknown
- # Install cargo-binstall for macOS runners using direct download
- - name: Install cargo-binstall on macOS
- if: matrix.os == 'macos-latest'
- run: |
- export GITHUB_TOKEN=${{ secrets.RELEASE_TOKEN }}
- curl -L https://github.com/cargo-bins/cargo-binstall/releases/download/v1.9.0/cargo-binstall-universal-apple-darwin.zip -o cargo-binstall.zip
- unzip cargo-binstall.zip
- ./cargo-binstall -y --force cargo-binstall
-
- # Install cargo-binstall for other OSes using the standard method
+ # Install cargo-binstall for Linux/Windows
- name: Install cargo-binstall
- if: matrix.os != 'macos-latest'
+ if: matrix.os != 'macos-latest' && matrix.os != 'macOS-13'
uses: cargo-bins/cargo-binstall@main
- name: Depends install
@@ -105,7 +94,13 @@ jobs:
run: |
rustup target add wasm32-unknown-unknown
- - name: Install Trunk
+ - name: Install Trunk (macOS)
+ if: matrix.os == 'macos-latest' || matrix.os == 'macOS-13'
+ run: |
+ brew install trunk
+
+ - name: Install Trunk (Linux/Windows)
+ if: matrix.os != 'macos-latest' && matrix.os != 'macOS-13'
run: |
cargo binstall trunk -y
@@ -143,9 +138,10 @@ jobs:
- name: Build | Compile (UNIX)
run: |
- cd web/src-tauri
+ cd web
+ RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build
+ cd src-tauri
cat tauri.conf.json
- export RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\""
cargo tauri build
pwd
ls
@@ -155,9 +151,10 @@ jobs:
- name: Build | Compile (Windows)
run: |
- cd web/src-tauri
+ cd web
+ powershell -ExecutionPolicy Bypass -File .\build.ps1
+ cd src-tauri
Get-Content tauri.conf.json
- $env:RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=`"wasm_js`""
cargo tauri build
ls target/release/bundle
shell: pwsh
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index d17fe3da..58bbc8be 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -26,7 +26,7 @@ jobs:
--health-retries 5
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
@@ -56,11 +56,11 @@ jobs:
frontend-tests:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- uses: hecrj/setup-rust-action@v2
with:
- rust-version: 1.86
+ rust-version: 1.89
targets: wasm32-unknown-unknown
# Install cargo-binstall for other OSes using the standard method
diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml
index 7da34ba1..6ce793ef 100644
--- a/.github/workflows/docker-publish.yml
+++ b/.github/workflows/docker-publish.yml
@@ -36,7 +36,7 @@ jobs:
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
@@ -55,16 +55,14 @@ jobs:
build-and-push-arm64:
needs: set-env
- runs-on:
- - runs-on=${{ github.run_id }}
- - runner=4cpu-linux-arm64
+ runs-on: ubuntu-24.04-arm
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
@@ -90,7 +88,7 @@ jobs:
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Log in to Docker Hub
diff --git a/.github/workflows/nightly-docker-publish.yml b/.github/workflows/nightly-docker-publish.yml
index 6b098662..57a298bb 100644
--- a/.github/workflows/nightly-docker-publish.yml
+++ b/.github/workflows/nightly-docker-publish.yml
@@ -19,7 +19,7 @@ jobs:
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
@@ -39,16 +39,14 @@ jobs:
run: echo ${{ steps.docker_build.outputs.digest }}
build-and-push-nightly-arm64:
- runs-on:
- - runs-on=${{ github.run_id }}
- - runner=2cpu-linux-arm64
+ runs-on: ubuntu-24.04-arm
permissions:
contents: read
packages: write
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
@@ -76,7 +74,7 @@ jobs:
id-token: write
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
diff --git a/.github/workflows/notification.yml b/.github/workflows/notification.yml
index e7884a34..dbd9ea93 100644
--- a/.github/workflows/notification.yml
+++ b/.github/workflows/notification.yml
@@ -21,7 +21,7 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Fetch the latest release
id: fetch_release
@@ -32,6 +32,14 @@ jobs:
echo "Release URL: $release_url"
echo "::set-output name=version::$latest_release"
echo "::set-output name=release_url::$release_url"
+
+ # Check if this is an RC release
+ if [[ "$latest_release" == *"-rc"* ]]; then
+ echo "RC release detected, skipping Discord notification"
+ echo "::set-output name=is_rc::true"
+ else
+ echo "::set-output name=is_rc::false"
+ fi
- name: Set release message
id: set_message
@@ -45,7 +53,13 @@ jobs:
echo "::set-output name=message::$message"
fi
+ - name: Skip Discord notification for RC release
+ if: steps.fetch_release.outputs.is_rc == 'true'
+ run: |
+ echo "Skipping Discord notification for RC release: ${{ steps.fetch_release.outputs.version }}"
+
- name: Discord notification to announce deployment
+ if: steps.fetch_release.outputs.is_rc == 'false'
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
diff --git a/.github/workflows/pre-release-version-update.yml b/.github/workflows/pre-release-version-update.yml
new file mode 100644
index 00000000..c360c0ca
--- /dev/null
+++ b/.github/workflows/pre-release-version-update.yml
@@ -0,0 +1,53 @@
+name: Pre-Release Version Update
+
+on:
+ workflow_dispatch:
+ inputs:
+ version:
+ description: "Version to set (e.g., 0.8.0)"
+ required: true
+ type: string
+
+jobs:
+ update-version:
+ name: Update Version Files
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Update app version
+ run: |
+ cd mobile
+ VERSION_NAME=${{ github.event.inputs.version }}
+ # Calculate what the git count WILL BE after we commit (current + 1)
+ BUILD_NUMBER=$(($(git rev-list --count HEAD) + 1 + 20250000))
+
+ # Update pubspec.yaml version
+ sed -i "s/^version: .*/version: ${VERSION_NAME}+${BUILD_NUMBER}/" pubspec.yaml
+
+ # Update environment.dart constants
+ sed -i "s/static const _projectVersion = '[^']*';/static const _projectVersion = '${VERSION_NAME}';/" lib/core/environment.dart
+ sed -i "s/static const _build = '[^']*';/static const _build = '${BUILD_NUMBER}';/" lib/core/environment.dart
+
+ echo "Updated version to ${VERSION_NAME}+${BUILD_NUMBER}"
+
+ - name: Commit and push version update
+ run: |
+ git config --local user.email "action@github.com"
+ git config --local user.name "GitHub Action"
+ git add mobile/pubspec.yaml mobile/lib/core/environment.dart
+ git commit -m "chore: update version to ${{ github.event.inputs.version }} [skip ci]"
+ git push
+
+ - name: Summary
+ run: |
+ echo "✅ Version updated to ${{ github.event.inputs.version }}"
+ echo "📋 Next steps:"
+ echo "1. Create a GitHub release pointing to the latest commit"
+ echo "2. The release workflow will build from that exact commit"
+ echo "3. Version files will match the commit for reproducible builds"
\ No newline at end of file
diff --git a/.github/workflows/test-ios-app.yml b/.github/workflows/test-ios-app.yml
deleted file mode 100644
index 3bece6ec..00000000
--- a/.github/workflows/test-ios-app.yml
+++ /dev/null
@@ -1,122 +0,0 @@
-name: Build IOS Pinepods App (Legacy Tauri - Deprecated)
-
-on:
- # This workflow is deprecated in favor of build-ios-flutter.yml
- # release:
- # types: [published]
- workflow_dispatch:
- inputs:
- version:
- description: "Manual override version tag (optional)"
- required: false
-
-jobs:
- build:
- name: Build ios Release
- runs-on: macOS-latest
-
- steps:
- - name: Set Image Tag (Unix)
- run: echo "IMAGE_TAG=${{ github.event.release.tag_name || github.event.inputs.version || 'latest' }}" >> $GITHUB_ENV
-
- - name: Setup | Checkout
- uses: actions/checkout@v3
-
- - uses: hecrj/setup-rust-action@v2
- with:
- rust-version: 1.86
- targets: wasm32-unknown-unknown
-
- - name: Install cargo-binstall
- uses: cargo-bins/cargo-binstall@main
-
- - name: Depends install
- if: ${{ env.DEPENDS_SETUP == 'true' }}
- run: |
- sudo apt update
- sudo apt install -qy libgtk-3-dev
- sudo apt-get install -y libwebkit2gtk-4.0-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
-
- - name: wasm-addition
- run: |
- rustup target add wasm32-unknown-unknown
-
- - name: Install Trunk
- run: |
- cargo binstall trunk -y
-
- - name: Install Tauri
- run: |
- cargo install tauri-cli@2.0.0-rc.16 --locked
- - name: Update Tauri version
- run: |
- cd web/src-tauri
- sed -i '' "s/\"version\": \".*\"/\"version\": \"${IMAGE_TAG}\"/" tauri.conf.json
- cat tauri.conf.json
-
- - name: Build iOS app
- run: |
- cd web/src-tauri
- cargo tauri icon icons/Square1024x1024.png
- cargo tauri ios init
- cargo tauri ios build
- cargo tauri icon src-tauri/icons/Square1024x1024.png
- cargo tauri ios build
- # --release --export-method app-store-connect
- - name: Upload IPA
- uses: actions/upload-artifact@v4
- with:
- name: Pinepods-iOS
- path: web/src-tauri/gen/apple/build/arm64/*.ipa
-
- # - name: Upload to App Store Connect
- # env:
- # APPLE_API_KEY_ID: ${{ secrets.APPLE_API_KEY_ID }}
- # APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
- # run: |
- # xcrun altool --upload-app --type ios --file "web/src-tauri/gen/apple/build/arm64/*.ipa" --apiKey $APPLE_API_KEY_ID --apiIssuer $APPLE_API_ISSUER
-
- # - name: Cleanup keychain and provisioning profile
- # if: ${{ always() }}
- # run: |
- # security delete-keychain $RUNNER_TEMP/app-signing.keychain-db
- # rm ~/Library/MobileDevice/Provisioning\ Profiles/build_pp.mobileprovision
-
- # - name: Archive build 1 (apk)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Archive build 2 (aab)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/gen/android/app/build/outputs/apk/universal/release/app-universal-release.apk
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Archive build 2 (Ubuntu)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/target/release/bundle/appimage/${{ env.ARTIFACT_NAME2 }}
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Archive build 3 (Ubuntu)
- # uses: actions/upload-artifact@v3
- # with:
- # name: ${{ matrix.os }}-build
- # path: ./web/src-tauri/target/release/bundle/rpm/${{ env.ARTIFACT_NAME3 }}
- # if: ${{ matrix.os == 'ubuntu-latest' }}
-
- # - name: Upload release asset (Ubuntu - DEB)
- # if: github.event_name == 'release' && matrix.os == 'ubuntu-latest'
- # uses: actions/upload-release-asset@v1
- # env:
- # GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- # with:
- # upload_url: ${{ github.event.release.upload_url }}
- # asset_path: ./web/src-tauri/target/release/bundle/deb/${{ env.ARTIFACT_NAME1 }}
- # asset_name: ${{ env.ARTIFACT_NAME1 }}
- # asset_content_type: application/vnd.debian.binary-package
diff --git a/.github/workflows/test-pinepods.yml b/.github/workflows/test-pinepods.yml
index fbbc8e62..cf01dfa3 100644
--- a/.github/workflows/test-pinepods.yml
+++ b/.github/workflows/test-pinepods.yml
@@ -11,7 +11,7 @@ jobs:
test:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Build the Docker test container
run: docker build -t madeofpendletonwool/pinepods-test . -f dockerfile-test
- uses: rustsec/audit-check@v1.4.1
@@ -37,5 +37,5 @@ jobs:
- uses: taiki-e/cache-cargo-install-action@v1
with:
tool: cargo-checkmate
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- run: cargo-checkmate run ${{ matrix.phase }}
\ No newline at end of file
diff --git a/.github/workflows/update-aur-package.yml b/.github/workflows/update-aur-package.yml
index 462a0aa2..38f2d7f8 100644
--- a/.github/workflows/update-aur-package.yml
+++ b/.github/workflows/update-aur-package.yml
@@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
- name: Set version
run: |
diff --git a/.gitignore b/.gitignore
index 596291b7..50518385 100644
--- a/.gitignore
+++ b/.gitignore
@@ -88,6 +88,8 @@ clients/mac-app/pinepods.spec
web/target/*
web/.idea/*
keystore.properties
+key.properties
+**/key.properties
# Virtual Environment
diff --git a/Backend/pinepods_backend/Cargo.lock b/Backend/pinepods_backend/Cargo.lock
new file mode 100644
index 00000000..3411cf4a
--- /dev/null
+++ b/Backend/pinepods_backend/Cargo.lock
@@ -0,0 +1,2712 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 4
+
+[[package]]
+name = "actix-codec"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "memchr",
+ "pin-project-lite",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "actix-cors"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d"
+dependencies = [
+ "actix-utils",
+ "actix-web",
+ "derive_more",
+ "futures-util",
+ "log",
+ "once_cell",
+ "smallvec",
+]
+
+[[package]]
+name = "actix-http"
+version = "3.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "44dfe5c9e0004c623edc65391dfd51daa201e7e30ebd9c9bedf873048ec32bc2"
+dependencies = [
+ "actix-codec",
+ "actix-rt",
+ "actix-service",
+ "actix-utils",
+ "base64",
+ "bitflags",
+ "brotli",
+ "bytes",
+ "bytestring",
+ "derive_more",
+ "encoding_rs",
+ "flate2",
+ "foldhash",
+ "futures-core",
+ "h2 0.3.27",
+ "http 0.2.12",
+ "httparse",
+ "httpdate",
+ "itoa",
+ "language-tags",
+ "local-channel",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "rand",
+ "sha1",
+ "smallvec",
+ "tokio",
+ "tokio-util",
+ "tracing",
+ "zstd",
+]
+
+[[package]]
+name = "actix-macros"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
+dependencies = [
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "actix-router"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13d324164c51f63867b57e73ba5936ea151b8a41a1d23d1031eeb9f70d0236f8"
+dependencies = [
+ "bytestring",
+ "cfg-if",
+ "http 0.2.12",
+ "regex",
+ "regex-lite",
+ "serde",
+ "tracing",
+]
+
+[[package]]
+name = "actix-rt"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24eda4e2a6e042aa4e55ac438a2ae052d3b5da0ecf83d7411e1a368946925208"
+dependencies = [
+ "futures-core",
+ "tokio",
+]
+
+[[package]]
+name = "actix-server"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502"
+dependencies = [
+ "actix-rt",
+ "actix-service",
+ "actix-utils",
+ "futures-core",
+ "futures-util",
+ "mio",
+ "socket2 0.5.10",
+ "tokio",
+ "tracing",
+]
+
+[[package]]
+name = "actix-service"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f"
+dependencies = [
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "actix-utils"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8"
+dependencies = [
+ "local-waker",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "actix-web"
+version = "4.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a597b77b5c6d6a1e1097fddde329a83665e25c5437c696a3a9a4aa514a614dea"
+dependencies = [
+ "actix-codec",
+ "actix-http",
+ "actix-macros",
+ "actix-router",
+ "actix-rt",
+ "actix-server",
+ "actix-service",
+ "actix-utils",
+ "actix-web-codegen",
+ "bytes",
+ "bytestring",
+ "cfg-if",
+ "cookie",
+ "derive_more",
+ "encoding_rs",
+ "foldhash",
+ "futures-core",
+ "futures-util",
+ "impl-more",
+ "itoa",
+ "language-tags",
+ "log",
+ "mime",
+ "once_cell",
+ "pin-project-lite",
+ "regex",
+ "regex-lite",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "smallvec",
+ "socket2 0.5.10",
+ "time",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "actix-web-codegen"
+version = "4.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8"
+dependencies = [
+ "actix-router",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "addr2line"
+version = "0.24.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler2"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+
+[[package]]
+name = "aho-corasick"
+version = "1.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc7bb162ec39d46ab1ca8c77bf72e890535becd1751bb45f64c597edb4c8c6b3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94fb8275041c72129eb51b7d0322c29b8387a0386127718b096429201a5d6ece"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
+[[package]]
+name = "android-tzdata"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
+
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "anstream"
+version = "0.6.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ae563653d1938f79b1ab1b5e668c87c76a9930414574a6583a7b7e11a8e6192"
+dependencies = [
+ "anstyle",
+ "anstyle-parse",
+ "anstyle-query",
+ "anstyle-wincon",
+ "colorchoice",
+ "is_terminal_polyfill",
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+
+[[package]]
+name = "anstyle-parse"
+version = "0.2.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
+dependencies = [
+ "utf8parse",
+]
+
+[[package]]
+name = "anstyle-query"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e231f6134f61b71076a3eab506c379d4f36122f2af15a9ff04415ea4c3339e2"
+dependencies = [
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "anstyle-wincon"
+version = "3.0.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e0633414522a32ffaac8ac6cc8f748e090c5717661fddeea04219e2344f5f2a"
+dependencies = [
+ "anstyle",
+ "once_cell_polyfill",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
+[[package]]
+name = "autocfg"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+
+[[package]]
+name = "backtrace"
+version = "0.3.75"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
+dependencies = [
+ "addr2line",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "base64"
+version = "0.22.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
+
+[[package]]
+name = "bitflags"
+version = "2.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
+
+[[package]]
+name = "block-buffer"
+version = "0.10.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "brotli"
+version = "8.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9991eea70ea4f293524138648e41ee89b0b2b12ddef3b255effa43c8056e0e0d"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "5.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "874bb8112abecc98cbd6d81ea4fa7e94fb9449648c93cc89aa40c81c24d7de03"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
+[[package]]
+name = "bumpalo"
+version = "3.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
+
+[[package]]
+name = "bytes"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
+
+[[package]]
+name = "bytestring"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e465647ae23b2823b0753f50decb2d5a86d2bb2cac04788fafd1f80e45378e5f"
+dependencies = [
+ "bytes",
+]
+
+[[package]]
+name = "cc"
+version = "1.2.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2352e5597e9c544d5e6d9c95190d5d27738ade584fa8db0a16e130e5c2b5296e"
+dependencies = [
+ "jobserver",
+ "libc",
+ "shlex",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
+
+[[package]]
+name = "cfg_aliases"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
+
+[[package]]
+name = "chrono"
+version = "0.4.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
+dependencies = [
+ "android-tzdata",
+ "iana-time-zone",
+ "js-sys",
+ "num-traits",
+ "serde",
+ "wasm-bindgen",
+ "windows-link",
+]
+
+[[package]]
+name = "colorchoice"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
+
+[[package]]
+name = "cookie"
+version = "0.16.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e859cd57d0710d9e06c381b550c06e76992472a8c6d527aecd2fc673dcc231fb"
+dependencies = [
+ "percent-encoding",
+ "time",
+ "version_check",
+]
+
+[[package]]
+name = "core-foundation"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "core-foundation-sys"
+version = "0.8.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b"
+
+[[package]]
+name = "cpufeatures"
+version = "0.2.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crypto-common"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+dependencies = [
+ "generic-array",
+ "typenum",
+]
+
+[[package]]
+name = "deranged"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e"
+dependencies = [
+ "powerfmt",
+]
+
+[[package]]
+name = "derive_more"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+dependencies = [
+ "derive_more-impl",
+]
+
+[[package]]
+name = "derive_more-impl"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "digest"
+version = "0.10.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
+dependencies = [
+ "block-buffer",
+ "crypto-common",
+]
+
+[[package]]
+name = "displaydoc"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dotenvy"
+version = "0.15.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
+
+[[package]]
+name = "encoding_rs"
+version = "0.8.35"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "env_filter"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "186e05a59d4c50738528153b83b0b0194d3a29507dfec16eccd4b342903397d0"
+dependencies = [
+ "log",
+ "regex",
+]
+
+[[package]]
+name = "env_logger"
+version = "0.11.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c863f0904021b108aa8b2f55046443e6b1ebde8fd4a15c399893aae4fa069f"
+dependencies = [
+ "anstream",
+ "anstyle",
+ "env_filter",
+ "jiff",
+ "log",
+]
+
+[[package]]
+name = "equivalent"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"
+
+[[package]]
+name = "errno"
+version = "0.3.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
+dependencies = [
+ "libc",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "fastrand"
+version = "2.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
+
+[[package]]
+name = "flate2"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a3d7db9596fecd151c5f638c0ee5d5bd487b6e0ea232e5dc96d5250f6f94b1d"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "foldhash"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+
+[[package]]
+name = "foreign-types"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
+dependencies = [
+ "foreign-types-shared",
+]
+
+[[package]]
+name = "foreign-types-shared"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e"
+
+[[package]]
+name = "futures-sink"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7"
+
+[[package]]
+name = "futures-task"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988"
+
+[[package]]
+name = "futures-util"
+version = "0.3.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.14.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
+dependencies = [
+ "typenum",
+ "version_check",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "335ff9f135e4384c8150d6f27c6daed433577f86b4750418338c01a1a2528592"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "wasi 0.11.1+wasi-snapshot-preview1",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "libc",
+ "r-efi",
+ "wasi 0.14.2+wasi-0.2.4",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "gimli"
+version = "0.31.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
+
+[[package]]
+name = "h2"
+version = "0.3.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d"
+dependencies = [
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "futures-util",
+ "http 0.2.12",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "h2"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3c0b69cfcb4e1b9f1bf2f53f95f766e4661169728ec61cd3fe5a0166f2d1386"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http 1.3.1",
+ "indexmap",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.15.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1"
+
+[[package]]
+name = "http"
+version = "0.2.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
+dependencies = [
+ "bytes",
+ "fnv",
+ "itoa",
+]
+
+[[package]]
+name = "http-body"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
+dependencies = [
+ "bytes",
+ "http 1.3.1",
+]
+
+[[package]]
+name = "http-body-util"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "http 1.3.1",
+ "http-body",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "httparse"
+version = "1.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"
+
+[[package]]
+name = "httpdate"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
+
+[[package]]
+name = "hyper"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
+dependencies = [
+ "bytes",
+ "futures-channel",
+ "futures-util",
+ "h2 0.4.12",
+ "http 1.3.1",
+ "http-body",
+ "httparse",
+ "itoa",
+ "pin-project-lite",
+ "smallvec",
+ "tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-rustls"
+version = "0.27.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
+dependencies = [
+ "http 1.3.1",
+ "hyper",
+ "hyper-util",
+ "rustls",
+ "rustls-pki-types",
+ "tokio",
+ "tokio-rustls",
+ "tower-service",
+ "webpki-roots",
+]
+
+[[package]]
+name = "hyper-tls"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0"
+dependencies = [
+ "bytes",
+ "http-body-util",
+ "hyper",
+ "hyper-util",
+ "native-tls",
+ "tokio",
+ "tokio-native-tls",
+ "tower-service",
+]
+
+[[package]]
+name = "hyper-util"
+version = "0.1.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e"
+dependencies = [
+ "base64",
+ "bytes",
+ "futures-channel",
+ "futures-core",
+ "futures-util",
+ "http 1.3.1",
+ "http-body",
+ "hyper",
+ "ipnet",
+ "libc",
+ "percent-encoding",
+ "pin-project-lite",
+ "socket2 0.6.0",
+ "system-configuration",
+ "tokio",
+ "tower-service",
+ "tracing",
+ "windows-registry",
+]
+
+[[package]]
+name = "iana-time-zone"
+version = "0.1.63"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8"
+dependencies = [
+ "android_system_properties",
+ "core-foundation-sys",
+ "iana-time-zone-haiku",
+ "js-sys",
+ "log",
+ "wasm-bindgen",
+ "windows-core",
+]
+
+[[package]]
+name = "iana-time-zone-haiku"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f31827a206f56af32e590ba56d5d2d085f558508192593743f16b2306495269f"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "icu_collections"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+dependencies = [
+ "displaydoc",
+ "potential_utf",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locale_core"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+
+[[package]]
+name = "icu_properties"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_locale_core",
+ "icu_properties_data",
+ "icu_provider",
+ "potential_utf",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+
+[[package]]
+name = "icu_provider"
+version = "2.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+dependencies = [
+ "displaydoc",
+ "icu_locale_core",
+ "stable_deref_trait",
+ "tinystr",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerotrie",
+ "zerovec",
+]
+
+[[package]]
+name = "idna"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
+dependencies = [
+ "idna_adapter",
+ "smallvec",
+ "utf8_iter",
+]
+
+[[package]]
+name = "idna_adapter"
+version = "1.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acae9609540aa318d1bc588455225fb2085b9ed0c4f6bd0d9d5bcd86f1a0344"
+dependencies = [
+ "icu_normalizer",
+ "icu_properties",
+]
+
+[[package]]
+name = "impl-more"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2"
+
+[[package]]
+name = "indexmap"
+version = "2.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
+dependencies = [
+ "equivalent",
+ "hashbrown",
+]
+
+[[package]]
+name = "io-uring"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "libc",
+]
+
+[[package]]
+name = "ipnet"
+version = "2.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"
+
+[[package]]
+name = "iri-string"
+version = "0.7.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
+dependencies = [
+ "memchr",
+ "serde",
+]
+
+[[package]]
+name = "is_terminal_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+
+[[package]]
+name = "itoa"
+version = "1.0.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+
+[[package]]
+name = "jiff"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "be1f93b8b1eb69c77f24bbb0afdf66f54b632ee39af40ca21c4365a1d7347e49"
+dependencies = [
+ "jiff-static",
+ "log",
+ "portable-atomic",
+ "portable-atomic-util",
+ "serde",
+]
+
+[[package]]
+name = "jiff-static"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "03343451ff899767262ec32146f6d559dd759fdadf42ff0e227c7c48f72594b4"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "jobserver"
+version = "0.1.33"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a"
+dependencies = [
+ "getrandom 0.3.3",
+ "libc",
+]
+
+[[package]]
+name = "js-sys"
+version = "0.3.77"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "language-tags"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
+
+[[package]]
+name = "libc"
+version = "0.2.175"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543"
+
+[[package]]
+name = "linux-raw-sys"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
+
+[[package]]
+name = "litemap"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+
+[[package]]
+name = "local-channel"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+ "local-waker",
+]
+
+[[package]]
+name = "local-waker"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487"
+
+[[package]]
+name = "lock_api"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
+
+[[package]]
+name = "lru-slab"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
+
+[[package]]
+name = "memchr"
+version = "2.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
+
+[[package]]
+name = "mime"
+version = "0.3.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
+
+[[package]]
+name = "miniz_oxide"
+version = "0.8.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
+dependencies = [
+ "adler2",
+]
+
+[[package]]
+name = "mio"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
+dependencies = [
+ "libc",
+ "log",
+ "wasi 0.11.1+wasi-snapshot-preview1",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "native-tls"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e"
+dependencies = [
+ "libc",
+ "log",
+ "openssl",
+ "openssl-probe",
+ "openssl-sys",
+ "schannel",
+ "security-framework",
+ "security-framework-sys",
+ "tempfile",
+]
+
+[[package]]
+name = "num-conv"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
+
+[[package]]
+name = "num-traits"
+version = "0.2.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "object"
+version = "0.36.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.21.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+
+[[package]]
+name = "once_cell_polyfill"
+version = "1.70.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+
+[[package]]
+name = "openssl"
+version = "0.10.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8"
+dependencies = [
+ "bitflags",
+ "cfg-if",
+ "foreign-types",
+ "libc",
+ "once_cell",
+ "openssl-macros",
+ "openssl-sys",
+]
+
+[[package]]
+name = "openssl-macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "openssl-probe"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e"
+
+[[package]]
+name = "openssl-sys"
+version = "0.9.109"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "percent-encoding"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
+
+[[package]]
+name = "pin-utils"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "pinepods_backend"
+version = "0.1.0"
+dependencies = [
+ "actix-cors",
+ "actix-web",
+ "chrono",
+ "dotenvy",
+ "env_logger",
+ "log",
+ "reqwest",
+ "serde",
+ "serde_json",
+ "sha1",
+ "urlencoding",
+]
+
+[[package]]
+name = "pkg-config"
+version = "0.3.32"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"
+
+[[package]]
+name = "portable-atomic"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
+
+[[package]]
+name = "portable-atomic-util"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d8a2f0d8d040d7848a709caf78912debcc3f33ee4b3cac47d73d1e1069e83507"
+dependencies = [
+ "portable-atomic",
+]
+
+[[package]]
+name = "potential_utf"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
+dependencies = [
+ "zerovec",
+]
+
+[[package]]
+name = "powerfmt"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "85eae3c4ed2f50dcfe72643da4befc30deadb458a9b590d720cde2f2b1e97da9"
+dependencies = [
+ "zerocopy",
+]
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.96"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "beef09f85ae72cea1ef96ba6870c51e6382ebfa4f0e85b643459331f3daa5be0"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quinn"
+version = "0.11.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
+dependencies = [
+ "bytes",
+ "cfg_aliases",
+ "pin-project-lite",
+ "quinn-proto",
+ "quinn-udp",
+ "rustc-hash",
+ "rustls",
+ "socket2 0.5.10",
+ "thiserror",
+ "tokio",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-proto"
+version = "0.11.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
+dependencies = [
+ "bytes",
+ "getrandom 0.3.3",
+ "lru-slab",
+ "rand",
+ "ring",
+ "rustc-hash",
+ "rustls",
+ "rustls-pki-types",
+ "slab",
+ "thiserror",
+ "tinyvec",
+ "tracing",
+ "web-time",
+]
+
+[[package]]
+name = "quinn-udp"
+version = "0.5.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970"
+dependencies = [
+ "cfg_aliases",
+ "libc",
+ "once_cell",
+ "socket2 0.5.10",
+ "tracing",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "r-efi"
+version = "5.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
+
+[[package]]
+name = "rand"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
+dependencies = [
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
+dependencies = [
+ "getrandom 0.3.3",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.5.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-automata",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-lite"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
+
+[[package]]
+name = "regex-syntax"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
+
+[[package]]
+name = "reqwest"
+version = "0.12.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
+dependencies = [
+ "base64",
+ "bytes",
+ "encoding_rs",
+ "futures-core",
+ "h2 0.4.12",
+ "http 1.3.1",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-rustls",
+ "hyper-tls",
+ "hyper-util",
+ "js-sys",
+ "log",
+ "mime",
+ "native-tls",
+ "percent-encoding",
+ "pin-project-lite",
+ "quinn",
+ "rustls",
+ "rustls-pki-types",
+ "serde",
+ "serde_json",
+ "serde_urlencoded",
+ "sync_wrapper",
+ "tokio",
+ "tokio-native-tls",
+ "tokio-rustls",
+ "tower",
+ "tower-http",
+ "tower-service",
+ "url",
+ "wasm-bindgen",
+ "wasm-bindgen-futures",
+ "web-sys",
+ "webpki-roots",
+]
+
+[[package]]
+name = "ring"
+version = "0.17.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "getrandom 0.2.16",
+ "libc",
+ "untrusted",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56f7d92ca342cea22a06f2121d944b4fd82af56988c270852495420f961d4ace"
+
+[[package]]
+name = "rustc-hash"
+version = "2.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
+
+[[package]]
+name = "rustix"
+version = "1.0.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
+dependencies = [
+ "bitflags",
+ "errno",
+ "libc",
+ "linux-raw-sys",
+ "windows-sys 0.60.2",
+]
+
+[[package]]
+name = "rustls"
+version = "0.23.31"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0ebcbd2f03de0fc1122ad9bb24b127a5a6cd51d72604a3f3c50ac459762b6cc"
+dependencies = [
+ "once_cell",
+ "ring",
+ "rustls-pki-types",
+ "rustls-webpki",
+ "subtle",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-pki-types"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
+dependencies = [
+ "web-time",
+ "zeroize",
+]
+
+[[package]]
+name = "rustls-webpki"
+version = "0.103.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
+dependencies = [
+ "ring",
+ "rustls-pki-types",
+ "untrusted",
+]
+
+[[package]]
+name = "rustversion"
+version = "1.0.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"
+
+[[package]]
+name = "ryu"
+version = "1.0.20"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+
+[[package]]
+name = "schannel"
+version = "0.1.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d"
+dependencies = [
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "scopeguard"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
+
+[[package]]
+name = "security-framework"
+version = "2.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.219"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.219"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.142"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7"
+dependencies = [
+ "itoa",
+ "memchr",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_urlencoded"
+version = "0.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
+dependencies = [
+ "form_urlencoded",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha1"
+version = "0.10.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba"
+dependencies = [
+ "cfg-if",
+ "cpufeatures",
+ "digest",
+]
+
+[[package]]
+name = "shlex"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
+
+[[package]]
+name = "signal-hook-registry"
+version = "1.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2a4719bff48cee6b39d12c020eeb490953ad2443b7055bd0b21fca26bd8c28b"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "slab"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589"
+
+[[package]]
+name = "smallvec"
+version = "1.15.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
+
+[[package]]
+name = "socket2"
+version = "0.5.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
+dependencies = [
+ "libc",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
+name = "socket2"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807"
+dependencies = [
+ "libc",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
+[[package]]
+name = "subtle"
+version = "2.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
+
+[[package]]
+name = "syn"
+version = "2.0.104"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "sync_wrapper"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263"
+dependencies = [
+ "futures-core",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "system-configuration"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b"
+dependencies = [
+ "bitflags",
+ "core-foundation",
+ "system-configuration-sys",
+]
+
+[[package]]
+name = "system-configuration-sys"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1"
+dependencies = [
+ "fastrand",
+ "getrandom 0.3.3",
+ "once_cell",
+ "rustix",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "thiserror"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "2.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "time"
+version = "0.3.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40"
+dependencies = [
+ "deranged",
+ "itoa",
+ "num-conv",
+ "powerfmt",
+ "serde",
+ "time-core",
+ "time-macros",
+]
+
+[[package]]
+name = "time-core"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c"
+
+[[package]]
+name = "time-macros"
+version = "0.2.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49"
+dependencies = [
+ "num-conv",
+ "time-core",
+]
+
+[[package]]
+name = "tinystr"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b"
+dependencies = [
+ "displaydoc",
+ "zerovec",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
+
+[[package]]
+name = "tokio"
+version = "1.47.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "89e49afdadebb872d3145a5638b59eb0691ea23e46ca484037cfab3b76b95038"
+dependencies = [
+ "backtrace",
+ "bytes",
+ "io-uring",
+ "libc",
+ "mio",
+ "parking_lot",
+ "pin-project-lite",
+ "signal-hook-registry",
+ "slab",
+ "socket2 0.6.0",
+ "windows-sys 0.59.0",
+]
+
+[[package]]
+name = "tokio-native-tls"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2"
+dependencies = [
+ "native-tls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-rustls"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
+dependencies = [
+ "rustls",
+ "tokio",
+]
+
+[[package]]
+name = "tokio-util"
+version = "0.7.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14307c986784f72ef81c89db7d9e28d6ac26d16213b109ea501696195e6e3ce5"
+dependencies = [
+ "bytes",
+ "futures-core",
+ "futures-sink",
+ "pin-project-lite",
+ "tokio",
+]
+
+[[package]]
+name = "tower"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "pin-project-lite",
+ "sync_wrapper",
+ "tokio",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-http"
+version = "0.6.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
+dependencies = [
+ "bitflags",
+ "bytes",
+ "futures-util",
+ "http 1.3.1",
+ "http-body",
+ "iri-string",
+ "pin-project-lite",
+ "tower",
+ "tower-layer",
+ "tower-service",
+]
+
+[[package]]
+name = "tower-layer"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e"
+
+[[package]]
+name = "tower-service"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3"
+
+[[package]]
+name = "tracing"
+version = "0.1.41"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0"
+dependencies = [
+ "log",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.34"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
+[[package]]
+name = "typenum"
+version = "1.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
+
+[[package]]
+name = "untrusted"
+version = "0.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
+
+[[package]]
+name = "url"
+version = "2.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+]
+
+[[package]]
+name = "urlencoding"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
+[[package]]
+name = "utf8parse"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
+
+[[package]]
+name = "vcpkg"
+version = "0.2.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
+
+[[package]]
+name = "version_check"
+version = "0.9.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.1+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
+
+[[package]]
+name = "wasi"
+version = "0.14.2+wasi-0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3"
+dependencies = [
+ "wit-bindgen-rt",
+]
+
+[[package]]
+name = "wasm-bindgen"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5"
+dependencies = [
+ "cfg-if",
+ "once_cell",
+ "rustversion",
+ "wasm-bindgen-macro",
+]
+
+[[package]]
+name = "wasm-bindgen-backend"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6"
+dependencies = [
+ "bumpalo",
+ "log",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-futures"
+version = "0.4.50"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "once_cell",
+ "wasm-bindgen",
+ "web-sys",
+]
+
+[[package]]
+name = "wasm-bindgen-macro"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407"
+dependencies = [
+ "quote",
+ "wasm-bindgen-macro-support",
+]
+
+[[package]]
+name = "wasm-bindgen-macro-support"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "wasm-bindgen-backend",
+ "wasm-bindgen-shared",
+]
+
+[[package]]
+name = "wasm-bindgen-shared"
+version = "0.2.100"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "web-sys"
+version = "0.3.77"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "web-time"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb"
+dependencies = [
+ "js-sys",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "webpki-roots"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2"
+dependencies = [
+ "rustls-pki-types",
+]
+
+[[package]]
+name = "windows-core"
+version = "0.61.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3"
+dependencies = [
+ "windows-implement",
+ "windows-interface",
+ "windows-link",
+ "windows-result",
+ "windows-strings",
+]
+
+[[package]]
+name = "windows-implement"
+version = "0.60.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-interface"
+version = "0.59.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "windows-link"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a"
+
+[[package]]
+name = "windows-registry"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e"
+dependencies = [
+ "windows-link",
+ "windows-result",
+ "windows-strings",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-strings"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57"
+dependencies = [
+ "windows-link",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.52.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.59.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b"
+dependencies = [
+ "windows-targets 0.52.6",
+]
+
+[[package]]
+name = "windows-sys"
+version = "0.60.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
+dependencies = [
+ "windows-targets 0.53.3",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm 0.52.6",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
+[[package]]
+name = "windows-targets"
+version = "0.53.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91"
+dependencies = [
+ "windows-link",
+ "windows_aarch64_gnullvm 0.53.0",
+ "windows_aarch64_msvc 0.53.0",
+ "windows_i686_gnu 0.53.0",
+ "windows_i686_gnullvm 0.53.0",
+ "windows_i686_msvc 0.53.0",
+ "windows_x86_64_gnu 0.53.0",
+ "windows_x86_64_gnullvm 0.53.0",
+ "windows_x86_64_msvc 0.53.0",
+]
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.53.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486"
+
+[[package]]
+name = "wit-bindgen-rt"
+version = "0.39.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "writeable"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb"
+
+[[package]]
+name = "yoke"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc"
+dependencies = [
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerocopy"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f"
+dependencies = [
+ "zerocopy-derive",
+]
+
+[[package]]
+name = "zerocopy-derive"
+version = "0.8.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zeroize"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
+
+[[package]]
+name = "zerotrie"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.11.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e7aa2bd55086f1ab526693ecbe444205da57e25f4489879da80635a46d90e73b"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "zstd"
+version = "0.13.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e91ee311a569c327171651566e07972200e76fcfe2242a4fa446149a3881c08a"
+dependencies = [
+ "zstd-safe",
+]
+
+[[package]]
+name = "zstd-safe"
+version = "7.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f49c4d5f0abb602a93fb8736af2a4f4dd9512e36f7f570d66e65ff867ed3b9d"
+dependencies = [
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.15+zstd.1.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237"
+dependencies = [
+ "cc",
+ "pkg-config",
+]
diff --git a/Backend/pinepods_backend/Cargo.toml b/Backend/pinepods_backend/Cargo.toml
index cd683e4b..773afca6 100644
--- a/Backend/pinepods_backend/Cargo.toml
+++ b/Backend/pinepods_backend/Cargo.toml
@@ -7,13 +7,13 @@ edition = "2021"
[dependencies]
actix-web = "4.11.0"
-serde = { version = "1.0.209", features = ["derive"] }
-serde_json = "1.0.141"
-reqwest = { version = "0.12.22", features = ["json", "rustls-tls"] }
+serde = { version = "1.0.225", features = ["derive"] }
+serde_json = "1.0.145"
+reqwest = { version = "0.12.23", features = ["json", "rustls-tls"] }
env_logger = "0.11.8"
-log = "0.4.27"
+log = "0.4.28"
dotenvy = "0.15.7"
sha1 = "0.10.6"
urlencoding = "2.1.3"
actix-cors = "0.7.1"
-chrono = { version = "0.4.41", features = ["serde"] }
+chrono = { version = "0.4.42", features = ["serde"] }
diff --git a/Dockerfile.validator b/Dockerfile.validator
new file mode 100644
index 00000000..c99992a7
--- /dev/null
+++ b/Dockerfile.validator
@@ -0,0 +1,28 @@
+FROM python:3.11-slim
+
+# Install PostgreSQL dev libraries and required packages
+RUN apt-get update && apt-get install -y \
+ libpq-dev \
+ gcc \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install required packages
+RUN pip install psycopg[binary] mysql-connector-python cryptography passlib argon2-cffi
+
+# Copy validation scripts
+COPY database_functions/ /app/database_functions/
+COPY validate_db.py /app/
+
+# Set working directory
+WORKDIR /app
+
+# Set default environment variables for MySQL (TEST ONLY - NOT SECURE)
+ENV DB_TYPE=mysql
+ENV DB_HOST=mysql_db
+ENV DB_PORT=3306
+ENV DB_USER=root
+ENV DB_PASSWORD=test_password_123
+ENV DB_NAME=pinepods_database
+
+# Run validator
+CMD ["python", "validate_db.py", "--verbose"]
\ No newline at end of file
diff --git a/Dockerfile.validator.postgres b/Dockerfile.validator.postgres
new file mode 100644
index 00000000..d733ca8d
--- /dev/null
+++ b/Dockerfile.validator.postgres
@@ -0,0 +1,28 @@
+FROM python:3.11-slim
+
+# Install PostgreSQL dev libraries and required packages
+RUN apt-get update && apt-get install -y \
+ libpq-dev \
+ gcc \
+ && rm -rf /var/lib/apt/lists/*
+
+# Install required packages
+RUN pip install psycopg[binary] mysql-connector-python cryptography passlib argon2-cffi
+
+# Copy validation scripts
+COPY database_functions/ /app/database_functions/
+COPY validate_db.py /app/
+
+# Set working directory
+WORKDIR /app
+
+# Set default environment variables for PostgreSQL (TEST ONLY - NOT SECURE)
+ENV DB_TYPE=postgresql
+ENV DB_HOST=postgres_db
+ENV DB_PORT=5432
+ENV DB_USER=postgres
+ENV DB_PASSWORD=test_password_123
+ENV DB_NAME=pinepods_database
+
+# Run validator
+CMD ["python", "validate_db.py", "--verbose"]
\ No newline at end of file
diff --git a/README.md b/README.md
index 4f692051..bef12c7d 100644
--- a/README.md
+++ b/README.md
@@ -36,7 +36,7 @@
# Getting Started
-PinePods is a Rust based podcast management system that manages podcasts with multi-user support and relies on a central database with clients to connect to it. It's browser based and your podcasts and settings follow you from device to device due to everything being stored on the server. You can subscribe to podcasts and even hosts for podcasts with the help of the PodPeopleDB. It works on mobile devices and can also sync with a Nextcloud server or gpodder compatible sync server so you can use external apps like Antennapod as well!
+PinePods is a Rust based podcast management system that manages podcasts with multi-user support and relies on a central database with clients to connect to it. It's browser based and your podcasts and settings follow you from device to device due to everything being stored on the server. You can subscribe to podcasts and even hosts for podcasts with the help of the PodPeopleDB. It has a native mobile app for iOS and Android and comes prebaked with its own internal gpodder server so you can use external apps like Antennapod as well!
For more information than what's provided in this repo visit the [documentation site](https://www.pinepods.online/).
@@ -46,7 +46,9 @@ For more information than what's provided in this repo visit the [documentation
## Features
-Pinepods is a complete podcast management system and allows you to play, download, and keep track of podcasts you (or any of your users) enjoy. It allows for searching and subscribing to hosts and podcasts using The Podcast Index or Itunes and provides a modern looking UI to browse through shows and episodes. In addition, Pinepods provides simple user management and can be used by multiple users at once using a browser or app version. Everything is saved into a MySQL or Postgres database including user settings, podcasts and episodes. It's fully self-hosted, open-sourced, and I provide an option to use a hosted search API or you can also get one from the Podcast Index and use your own. There's even many different themes to choose from! Everything is fully dockerized and I provide a simple guide found below explaining how to install and run Pinepods on your own system.
+Pinepods is a complete podcast management system and allows you to play, download, and keep track of podcasts you (or any of your users) enjoy. It allows for searching and subscribing to hosts and podcasts using The Podcast Index or iTunes and provides a modern-looking UI to browse through shows and episodes. In addition, Pinepods provides simple user management and can be used by multiple users at once using a browser or app version. Everything is saved into a MySQL, MariaDB, or Postgres database including user settings, podcasts and episodes. It's fully self-hosted, open-sourced, and I provide an option to use a hosted search API or you can also get one from the Podcast Index and use your own. There are even many different themes to choose from! Everything is fully dockerized and I provide a simple guide found below explaining how to install and run Pinepods on your own system.
+
+There's plenty more features as well, check out the [Pinepods Site](https://www.pinepods.online/docs/Features/smart-playlists) for more!
## Try it out! :zap:
@@ -67,6 +69,13 @@ You can also choose to use MySQL/MariaDB or Postgres as your database. Examples
### Docker Compose
+> **⚠️ WARNING:** An issue was recently pointed out to me related to postgres version 18. If you run into an error that looks like this on startup when using postgres:
+
+```
+Failed to deploy a stack: compose up operation failed: Error response from daemon: failed to create task for container: failed to create shim task: OCI runtime create failed: runc create failed: unable to start container process: error during container init: error mounting "" to rootfs at "/var/lib/postgresql/data": change mount propagation through procfd: open o_path procfd: open //overlay2/17561d31d0730b3fd3071752d82cf8fe60b2ea0ed84521c6ee8b06427ca8f064/merged/var/lib/postgresql/data: no such file or directory: unknown
+```
+> Please change your postgres tag in your compose to '17'. See [this issue](https://github.com/docker-library/postgres/issues/1363) for more details.
+
#### User Permissions
Pinepods can run with specific user permissions to ensure downloaded files are accessible on the host system. This is controlled through two environment variables:
- `PUID`: Process User ID (defaults to 1000 if not set)
@@ -83,7 +92,7 @@ id -g # Your GID
services:
db:
container_name: db
- image: postgres:latest
+ image: postgres:17
environment:
POSTGRES_DB: pinepods_database
POSTGRES_USER: postgres
@@ -91,14 +100,11 @@ services:
PGDATA: /var/lib/postgresql/data/pgdata
volumes:
- /home/user/pinepods/pgdata:/var/lib/postgresql/data
- ports:
- - "5432:5432"
restart: always
valkey:
image: valkey/valkey:8-alpine
- ports:
- - "6379:6379"
+ restart: always
pinepods:
image: madeofpendletonwool/pinepods:latest
@@ -132,6 +138,7 @@ services:
# Timezone volumes, HIGHLY optional. Read the timezone notes below
- /etc/localtime:/etc/localtime:ro
- /etc/timezone:/etc/timezone:ro
+ restart: always
depends_on:
- db
- valkey
@@ -142,7 +149,7 @@ services:
services:
db:
container_name: db
- image: mariadb:latest
+ image: mariadb:12
command: --wait_timeout=1800
environment:
MYSQL_TCP_PORT: 3306
@@ -153,14 +160,10 @@ services:
MYSQL_INIT_CONNECT: 'SET @@GLOBAL.max_allowed_packet=64*1024*1024;'
volumes:
- /home/user/pinepods/sql:/var/lib/mysql
- ports:
- - "3306:3306"
restart: always
valkey:
image: valkey/valkey:8-alpine
- ports:
- - "6379:6379"
pinepods:
image: madeofpendletonwool/pinepods:latest
@@ -223,12 +226,12 @@ Most of those are pretty obvious, but let's break a couple of them down.
#### Admin User Info
-First of all, the USERNAME, PASSWORD, FULLNAME, and EMAIL vars are your details for your default admin account. This account will have admin credentials and will be able to log in right when you start up the app. Once started you'll be able to create more users and even more admins but you need an account to kick things off on. If you don't specify credentials in the compose file it will create an account with a random password for you but I would recommend just creating one for yourself.
+First of all, the USERNAME, PASSWORD, FULLNAME, and EMAIL vars are your details for your default admin account. This account will have admin credentials and will be able to log in right when you start up the app. Once started you'll be able to create more users and even more admins but you need an account to kick things off on. If you don't specify credentials in the compose file it will prompt you to create an account before first login.
#### Note on the Search API
-Let's talk quickly about the searching API. This allows you to search for new podcasts and it queries either itunes or the podcast index for new podcasts. The podcast index requires an api key while itunes does not. If you'd rather not mess with the api at all simply set the API_URL to the one below.
+Let's talk quickly about the searching API. This allows you to search for new podcasts and it queries either itunes or the podcast index for new podcasts. It also allows for searching youtube channels via the Google Search API. The podcast index and Google Search require an api key while itunes does not. If you'd rather not mess with the api at all simply set the API_URL to the one below, however, know that Google implements a limit per day on youtube searches and the search api that I maintain below hits its limit pretty quickly. So if you're a big youtube user you might want to host your own.
```
SEARCH_API_URL: 'https://search.pinepods.online/api/search'
@@ -506,6 +509,10 @@ paru -S pinepods
#### Flatpak
+
+
+
+
You can search for Pinepods in your favorite flatpak installer gui app such as Gnome Software.
Flathub page can be found [here](https://flathub.org/apps/com.gooseberrydevelopment.pinepods)
@@ -520,7 +527,7 @@ I have had such a nightmare trying to make the snap client work. Pass, use the f
### Windows Client Install :computer:
-Any of the client additions are super easy to get going. First head over to the releases page on Github
+First head over to the releases page on Github
https://github.com/madeofpendletonwool/PinePods/releases
@@ -536,7 +543,7 @@ Once started you'll be able to sign in with your username and password. The serv
### Mac Client Install :computer:
-Any of the client additions are super easy to get going. First head over to the releases page on Github
+First head over to the releases page on Github
https://github.com/madeofpendletonwool/PinePods/releases
@@ -550,11 +557,23 @@ Once started you'll be able to sign in with your username and password. The serv
### Android Install :iphone:
-For now, it's a manual install and there are some issues with the app. Check the releases page for the latest apk.
+
+
+
+
+
+
+
+
+Currently there's options for direct downloads and Pinepods is on the IzzyOnDroid storefront! More locations coming soon!
### iOS Install :iphone:
-Coming Soon - The web app works great for phones.
+
+
+
+
+The iOS app has arrived! Enjoy!
## PodPeople DB
@@ -568,17 +587,11 @@ Finally, you can check out the Repo for it [here!](https://github.com/madeofpend
## Pinepods Firewood
-A CLI only client that can be used to remotely share your podcasts to is in the works! Check out [Pinepods Firewood!](https://github.com/madeofpendletonwool/pinepods-firewood)
+A CLI only client that can be used to remotely share your podcasts to has had its first release! Now you can enjoy podcasts from the comfort of your terminal! Check out [Pinepods Firewood!](https://github.com/madeofpendletonwool/pinepods-firewood)
## Platform Availability
-The Intention is for this app to become available on Windows, Linux, Mac, Android, and iOS. Windows, Linux, Mac, web, and android are all currently available and working. The android app is in a sort of beta currently as I finalize any remaining issues with it. Track those [here](https://github.com/madeofpendletonwool/PinePods/issues/320). This app is built with Tauri, therefore once the Android version is in a final state there's no reason I can't just compile it to iOS as well.
-
-For a podcast sync app I recommend Opodsync, but nextcloud sync works great too! This is only required if you use an app like AntennaPods. So then your Pinepods and Antennapods sync up podcasts.
-
-[OpodSync](https://github.com/kd2org/opodsync)
-
-[Nextcloud Podcast Sync App](https://apps.nextcloud.com/apps/gpoddersync)
+The Intention is for this app to become available on Windows, Linux, Mac, Android, and iOS. Windows, Linux, Mac, web, and android are all currently available and working.
ARM devices are also supported including raspberry pis. The app is shockingly performant on a raspberry pi as well. The only limitation is that a 64bit OS is required on an ARM device. Setup is exactly the same, just use the latest tag and docker will auto pull the ARM version.
@@ -589,17 +602,12 @@ ARM devices are also supported including raspberry pis. The app is shockingly pe
- [ ] Nix Package
- [x] Aur Package
- [x] Helm Chart and repo for kubernetes deployment
-- [ ] Mobile Apps
+- [x] Mobile Apps
- [x] Android App - Beta
- [ ] Android Auto support
- - [ ] iOS App
+ - [x] iOS App
- [ ] Packaging and automation
-### Long term goals
-
-- [ ] Podcast ad blocking. Either by parsing audio blocks with ai and filtering ads or by utilizing a centralized server to allow others to send their ad block info to after determining the timestamps for ads.
-
-
## Screenshots :camera:
Main Homepage with podcasts displayed
@@ -668,4 +676,8 @@ Portions of the mobile app retain the original BSD license and attribution as re
#### 💬 Acknowledgment
-Huge thanks to Ben Hills for open-sourcing the Anytime Podcast Player. It served as a solid foundation and greatly accelerated development of PinePods.
\ No newline at end of file
+Huge thanks to Ben Hills for open-sourcing the Anytime Podcast Player. It served as a solid foundation and greatly accelerated development of PinePods.
+
+#### 🌐 Translation
+
+Translations are managed through [Weblate](https://hosted.weblate.org), a web-based translation tool that makes it easy for the community to contribute translations. If you'd like to help translate PinePods into your language, please visit our Weblate project and join the translation effort!
diff --git a/clients/clientapi.py b/clients/clientapi.py
deleted file mode 100644
index f84b5e11..00000000
--- a/clients/clientapi.py
+++ /dev/null
@@ -1,6924 +0,0 @@
-# Fast API
-from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Depends, HTTPException, status, Header, Body, Path, Form, Query, \
- security, BackgroundTasks, UploadFile
-from fastapi.security import APIKeyHeader, HTTPBasic, HTTPBasicCredentials
-from fastapi.responses import PlainTextResponse, JSONResponse, Response, FileResponse, StreamingResponse, RedirectResponse
-from fastapi.middleware.cors import CORSMiddleware
-from starlette.concurrency import run_in_threadpool
-from threading import Lock
-import smtplib
-from email.mime.text import MIMEText
-from email.mime.multipart import MIMEMultipart
-from functools import lru_cache, wraps
-from yt_dlp import YoutubeDL
-import subprocess
-import threading
-
-# Needed Modules
-from passlib.context import CryptContext
-import mysql.connector
-from mysql.connector import pooling
-from time import time
-from mysql.connector.pooling import MySQLConnectionPool
-from mysql.connector import Error
-import psycopg
-from psycopg_pool import ConnectionPool
-from psycopg.rows import dict_row
-from psycopg.errors import UniqueViolation, ForeignKeyViolation, OperationalError
-import os
-import xml.etree.ElementTree as ET
-from fastapi.middleware.gzip import GZipMiddleware
-from starlette.middleware.sessions import SessionMiddleware
-from starlette.requests import Request
-import secrets
-from pydantic import BaseModel, Field, HttpUrl
-from typing import Dict, List, Any, Optional, Generator, Tuple, Set, TypedDict, Callable
-import json
-import logging
-import argparse
-import sys
-from pyotp import TOTP, random_base32
-import base64
-import traceback
-import time
-import httpx
-import asyncio
-import io
-import qrcode
-import qrcode.image.svg
-from urllib.parse import urlparse, urlunparse
-import datetime
-import feedparser
-import dateutil.parser
-import re
-import requests
-from requests.auth import HTTPBasicAuth
-from contextlib import contextmanager
-import signal
-
-def sigterm_handler(_signo, _stack_frame):
- # Perform cleanup here
- print("Received SIGTERM. Shutting down...")
- sys.exit(0)
-
-signal.signal(signal.SIGTERM, sigterm_handler)
-
-# Internal Modules
-sys.path.append('/pinepods')
-
-import database_functions.functions
-import database_functions.auth_functions
-import database_functions.app_functions
-import database_functions.import_progress
-import database_functions.oidc_state_manager
-import database_functions.valkey_client
-import database_functions.youtube
-import database_functions.tasks
-from database_functions.gpodder_router import gpodder_router
-from database_functions.db_client import create_database_connection, close_database_connection
-
-# # Use a try-except to handle potential import errors
-# try:
-# from database_functions.tasks import (
-# download_podcast_task,
-# download_youtube_video_task,
-# queue_podcast_downloads,
-# task_manager, # Changed from download_manager to task_manager
-# download_manager, # Keep this for backward compatibility
-# get_all_active_tasks, # Add this new function
-# debug_task
-# )
-# CELERY_AVAILABLE = True
-# print('celery tasks imported')
-# except ImportError as e:
-# print(f"Failed to import Celery tasks: {e}")
-# CELERY_AVAILABLE = False
-# # Define fallback functions if needed
-
-
-database_type = str(os.getenv('DB_TYPE', 'mariadb'))
-if database_type == "postgresql":
- print(f"You've selected a postgresql database.")
-else:
- print("You've selected a mariadb database")
-
-secret_key_middle = secrets.token_hex(32)
-
-# Temporary storage for MFA secrets
-temp_mfa_secrets = {}
-
-app = FastAPI()
-security = HTTPBasic()
-origins = [
- "http://localhost",
- "http://localhost:8080",
- "http://127.0.0.1:8080",
- "http://127.0.0.1",
- "*"
-]
-
-app.include_router(gpodder_router)
-
-# app.add_middleware(
-# CORSMiddleware,
-# allow_origins=origins,
-# allow_credentials=True,
-# allow_methods=["*"],
-# allow_headers=["*"],
-# )
-
-app.add_middleware(GZipMiddleware, minimum_size=1000)
-app.add_middleware(SessionMiddleware, secret_key=secret_key_middle)
-
-
-API_KEY_NAME = "pinepods_api"
-api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
-
-pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
-
-# Proxy variables
-proxy_host = os.environ.get("HOSTNAME", "localhost")
-proxy_port = os.environ.get("PINEPODS_PORT", "8040")
-proxy_protocol = os.environ.get("PROXY_PROTOCOL", "http")
-reverse_proxy = os.environ.get("REVERSE_PROXY", "False")
-
-# Podcast Index API url
-api_url = os.environ.get("SEARCH_API_URL", "https://search.pinepods.online/api/search")
-people_url = os.environ.get("PEOPLE_API_URL", "https://people.pinepods.online")
-
-# Initial Vars needed to start and used throughout
-if reverse_proxy == "True":
- proxy_url = f'{proxy_protocol}://{proxy_host}/mover/?url='
-else:
- proxy_url = f'{proxy_protocol}://{proxy_host}:{proxy_port}/mover/?url='
-
-logger = logging.getLogger(__name__)
-
-
-def get_database_connection():
- """FastAPI dependency for getting a database connection"""
- try:
- db = create_database_connection()
- yield db
- except HTTPException:
- raise # Re-raise the HTTPException to let FastAPI handle it properly
- except Exception as e:
- logger.error(f"Database connection error of type {type(e).__name__} with arguments: {e.args}")
- logger.error(traceback.format_exc())
- raise HTTPException(500, "Unable to connect to the database")
- finally:
- try:
- close_database_connection(db)
- except Exception as e:
- logger.error(f"Error in connection cleanup: {str(e)}")
-
-
-def get_api_keys(cnx):
- logging.info("Executing get_api_keys function...")
- if database_type == "postgresql":
- # Use dict_row row factory for PostgreSQL
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = 'SELECT * FROM "APIKeys"'
- else: # Assuming MariaDB/MySQL if not PostgreSQL
- cursor = cnx.cursor(dictionary=True)
- query = "SELECT * FROM APIKeys"
-
- try:
- cursor.execute(query)
- rows = cursor.fetchall()
- except Exception as e:
- logging.error(f"Database error: {e}")
- raise
- logging.info(f"Retrieved API keys: {rows}")
-
- cursor.close()
- return rows
-
-
-def get_api_key(request: Request, api_key: str = Depends(api_key_header),
- cnx: Generator = Depends(get_database_connection)):
- if api_key is None:
- raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="API key is missing")
-
- api_keys = get_api_keys(cnx)
-
- for api_key_entry in api_keys:
- stored_key = api_key_entry.get("APIKey".lower(), None)
- client_id = api_key_entry.get("APIKeyID".lower(), None)
-
- if api_key == stored_key: # Direct comparison instead of using Passlib
- request.session["api_key"] = api_key # Store the API key
- return client_id
-
- raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Invalid API key")
-
-
-def get_api_key_from_header(api_key: str = Header(None, name="Api-Key")):
- if not api_key:
- raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Not authenticated")
- return api_key
-
-class Web_Key:
- def __init__(self):
- self.web_key = None
-
- def get_web_key(self, cnx):
- self.web_key = database_functions.functions.get_web_key(cnx, database_type)
- return self.web_key
-
-base_webkey = Web_Key()
-
-async def initialize_web_key():
- cnx = create_database_connection()
- try:
- base_webkey.get_web_key(cnx)
- finally:
- close_database_connection(cnx)
-
-def direct_database_connection():
- """Get a direct database connection - alias for create_database_connection"""
- return create_database_connection()
-
-async def get_current_user(credentials: HTTPBasicCredentials = Depends(security)):
- # Use credentials.username and credentials.password where needed
- return credentials
-
-
-# Modify check_if_admin to handle initialization
-async def check_if_admin(api_key: str = Depends(get_api_key_from_header), cnx=Depends(get_database_connection)):
- # Initialize web key if not already set
- if base_webkey.web_key is None:
- await initialize_web_key()
-
- # Debug logging
- print(f"Checking admin access - API Key: {api_key}, Web Key: {base_webkey.web_key}")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
- if is_web_key:
- return True
-
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not user_id:
- raise HTTPException(status_code=403, detail="Invalid API key.")
-
- if user_id == 1:
- return True
-
- is_admin = database_functions.functions.user_admin_check(cnx, database_type, user_id)
- if not is_admin:
- raise HTTPException(status_code=403, detail="User not authorized.")
-
- return True
-
-
-def check_if_admin_inner(api_key: str, cnx):
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if not user_id:
- return False
- return database_functions.functions.user_admin_check(cnx, database_type, user_id)
-
-async def has_elevated_access(api_key: str, cnx):
- # Check if it's an admin
- is_admin = await run_in_threadpool(check_if_admin_inner, api_key, cnx)
- # Check if it's the web key
- web_key = base_webkey.web_key
- is_web_key = api_key == web_key
-
- return is_admin or is_web_key
-
-@app.get('/api/pinepods_check')
-async def pinepods_check():
- return {"status_code": 200, "pinepods_instance": True}
-
-
-@app.get('/api/data/verify_key')
-async def verify_key(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- return {"status": "success"}
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-@app.get('/api/data/get_user')
-async def get_user(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- retrieved_id = database_functions.functions.get_api_user(cnx, database_type, api_key)
- return {"status": "success", "retrieved_id": retrieved_id}
- else:
- raise HTTPException(status_code=403,
- detail="Your api-key appears to be incorrect.")
-
-@app.get('/api/data/get_key')
-async def get_key(cnx=Depends(get_database_connection),
- credentials: HTTPBasicCredentials = Depends(get_current_user)):
- is_password_valid = database_functions.auth_functions.verify_password(cnx, database_type, credentials.username.lower(), credentials.password)
- if is_password_valid:
- retrieved_key = database_functions.functions.get_api_key(cnx, database_type, credentials.username.lower())
- return {"status": "success", "retrieved_key": retrieved_key}
- else:
- raise HTTPException(status_code=403,
- detail="Your credentials appear to be incorrect.")
-
-@app.get("/api/data/config")
-async def api_config(api_key: str = Depends(get_api_key_from_header), cnx=Depends(get_database_connection)):
- global api_url, proxy_url, proxy_host, proxy_port, proxy_protocol, reverse_proxy
-
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- return {
- "api_url": api_url,
- "proxy_url": proxy_url,
- "proxy_host": proxy_host,
- "proxy_port": proxy_port,
- "proxy_protocol": proxy_protocol,
- "reverse_proxy": reverse_proxy,
- "people_url": people_url,
- }
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-
-@app.get("/api/data/guest_status", response_model=bool)
-async def api_guest_status(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- result = database_functions.functions.guest_status(cnx, database_type)
- return result
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-
-@app.get("/api/data/download_status", response_model=bool)
-async def api_download_status(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- result = database_functions.functions.download_status(cnx, database_type)
- return result
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-@app.get("/api/data/return_episodes/{user_id}")
-async def api_return_episodes(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- episodes = database_functions.functions.return_episodes(database_type, cnx, user_id)
- if episodes is None:
- episodes = [] # Return an empty list instead of raising an exception
- return {"episodes": episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return episodes of your own!")
-
-
-@app.get("/api/data/podcast_episodes")
-async def api_podcast_episodes(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), user_id: int = Query(...), podcast_id: int = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- episodes = database_functions.functions.return_podcast_episodes(database_type, cnx, user_id, podcast_id)
- if episodes is None:
- episodes = [] # Return an empty list instead of raising an exception
- # logging.error(f"Episodes returned: {episodes}")
- return {"episodes": episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return episodes of your own!")
-
-@app.get("/api/data/home_overview")
-async def api_home_overview(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- home_data = database_functions.functions.get_home_overview(database_type, cnx, user_id)
- return home_data
- else:
- raise HTTPException(
- status_code=403,
- detail="You can only view your own home overview!"
- )
-
-@app.get("/api/data/startpage")
-async def api_get_startpage(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == user_id or is_web_key:
- startpage = database_functions.functions.get_user_startpage(cnx, database_type, user_id)
- return {"StartPage": startpage}
- else:
- raise HTTPException(
- status_code=403,
- detail="You can only view your own StartPage setting!"
- )
-
-@app.post("/api/data/startpage")
-async def api_set_startpage(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...),
- startpage: str = Query(...)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == user_id or is_web_key:
- success = database_functions.functions.set_user_startpage(cnx, database_type, user_id, startpage)
- return {"success": success, "message": "StartPage updated successfully"}
- else:
- raise HTTPException(
- status_code=403,
- detail="You can only modify your own StartPage setting!"
- )
-
-@app.get("/api/data/youtube_episodes")
-async def api_youtube_episodes(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header), user_id: int = Query(...), podcast_id: int = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- episodes = database_functions.functions.return_youtube_episodes(database_type, cnx, user_id, podcast_id)
- if episodes is None:
- episodes = [] # Return an empty list instead of raising an exception
- return {"episodes": episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return episodes of your own!")
-
-
-@app.get("/api/data/get_episode_id_ep_name")
-async def api_episode_id(cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...), episode_title: str = Query(...), episode_url: str = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- print(episode_title)
- print(episode_url)
- ep_id = database_functions.functions.get_episode_id_ep_name(cnx, database_type, episode_title, episode_url)
- print(f"Episode ID: {ep_id}")
- return ep_id
- else:
- raise HTTPException(status_code=403,
- detail="You can only return pocast ids of your own podcasts!")
-
-
-@app.get("/api/data/get_podcast_id")
-async def api_podcast_id(cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...), podcast_feed: str = Query(...), podcast_title: str = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- episodes = database_functions.functions.get_podcast_id(database_type, cnx, user_id, podcast_feed, podcast_title)
- if episodes is None:
- episodes = [] # Return an empty list instead of raising an exception
- return {"episodes": episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return pocast ids of your own podcasts!")
-
-@app.get("/api/data/get_podcast_id_from_ep_id")
-async def api_get_podcast_id(
- episode_id: int,
- user_id: int,
- is_youtube: bool = False, # Add optional parameter
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- logging.info('Fetching API key')
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- logging.info('Getting key ID')
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- logging.info(f'Got key ID: {key_id}')
-
- if key_id == user_id or is_web_key:
- podcast_id = database_functions.functions.get_podcast_id_from_episode(
- cnx, database_type, episode_id, user_id, is_youtube
- )
- if podcast_id is None:
- raise HTTPException(status_code=404, detail="Podcast ID not found for the given episode ID")
- return {"podcast_id": podcast_id}
- else:
- raise HTTPException(status_code=403, detail="You can only get podcast ID for your own episodes.")
-
-
-@app.get("/api/data/get_podcast_id_from_ep_name")
-async def api_get_podcast_id_name(episode_name: str, episode_url: str, user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- logging.info('Fetching API key')
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
- logging.info('Getting key ID')
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- logging.info(f'Got key ID: {key_id}')
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- podcast_id = database_functions.functions.get_podcast_id_from_episode_name(cnx, database_type, episode_name, episode_url, user_id)
- if podcast_id is None:
- raise HTTPException(status_code=404, detail="Podcast ID not found for the given episode name and URL")
- return {"podcast_id": podcast_id}
- else:
- raise HTTPException(status_code=403, detail="You can only get podcast ID for your own episodes.")
-
-
-@app.get("/api/data/get_podcast_details")
-async def api_podcast_details(podcast_id: str = Query(...), cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
- print('in pod details')
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- details = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id)
- print(f'got details {details}')
- if details is None:
- episodes = [] # Return an empty list instead of raising an exception
- return {"details": details}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return pocast ids of your own podcasts!")
-
-class ClickedFeedURL(BaseModel):
- podcastid: int
- podcastname: str
- feedurl: str
- description: str
- author: str
- artworkurl: str
- explicit: bool
- episodecount: int
- categories: Optional[Dict[str, str]]
- websiteurl: str
- podcastindexid: int
- is_youtube: Optional[bool]
-
-@app.get("/api/data/get_podcast_details_dynamic", response_model=ClickedFeedURL)
-async def get_podcast_details(
- user_id: int,
- podcast_title: str,
- podcast_url: str,
- podcast_index_id: int,
- added: bool,
- display_only: bool = False,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions")
- if added:
- podcast_id = database_functions.functions.get_podcast_id(database_type, cnx, user_id, podcast_url, podcast_title)
- details = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id)
- if details is None:
- raise HTTPException(status_code=404, detail="Podcast not found")
-
- # Handle categories field with existence check
- categories = details.get("categories") if database_type != "postgresql" else details.get("categories")
- if not categories:
- categories_dict = {}
- elif categories.startswith('{'):
- try:
- categories = categories.replace("'", '"')
- categories_dict = json.loads(categories)
- except json.JSONDecodeError as e:
- print(f"JSON decode error: {e}")
- raise HTTPException(status_code=500, detail="Internal server error")
- else:
- categories_dict = {str(i): cat.strip() for i, cat in enumerate(categories.split(','))}
-
-
- pod_details = ClickedFeedURL(
- podcastid=0,
- podcastname=details["podcastname"],
- feedurl=details["feedurl"],
- description=details["description"],
- author=details["author"],
- artworkurl=details["artworkurl"],
- explicit=details["explicit"],
- episodecount=details["episodecount"],
- categories=categories_dict,
- websiteurl=details["websiteurl"],
- podcastindexid=details["podcastindexid"],
- is_youtube=details["isyoutubechannel"]
- )
- return pod_details
- else:
- podcast_values = database_functions.app_functions.get_podcast_values(podcast_url, user_id, None, None, display_only)
- categories = podcast_values['categories']
- print(f"heres the ep count: {podcast_values['pod_episode_count']}")
-
- if categories.startswith('{'):
- try:
- # Replace single quotes with double quotes
- categories = categories.replace("'", '"')
- categories_dict = json.loads(categories)
- except json.JSONDecodeError as e:
- print(f"JSON decode error: {e}")
- raise HTTPException(status_code=500, detail="Internal server error")
- else:
- categories_dict = {str(i): cat.strip() for i, cat in enumerate(categories.split(','))}
-
-
- return ClickedFeedURL(
- podcastid=0,
- podcastname=podcast_values['pod_title'],
- feedurl=podcast_values['pod_feed_url'],
- description=podcast_values['pod_description'],
- author=podcast_values['pod_author'],
- artworkurl=podcast_values['pod_artwork'],
- explicit=podcast_values['pod_explicit'],
- episodecount=podcast_values['pod_episode_count'],
- categories=categories_dict,
- websiteurl=podcast_values['pod_website'],
- podcastindexid=podcast_index_id,
- is_youtube=False
- )
-
-class ImportProgressResponse(BaseModel):
- current: int
- current_podcast: str
- total: int
-
-@app.get("/api/data/import_progress/{user_id}")
-async def get_import_progress(
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- # Fetch the import progress from the database
- current, total, current_podcast = database_functions.import_progress.import_progress_manager.get_progress(user_id)
- return ImportProgressResponse(current=current, total=total, current_podcast=current_podcast)
- else:
- raise HTTPException(status_code=403, detail="You can only fetch import progress for yourself!")
-
-class OPMLImportRequest(BaseModel):
- podcasts: List[str]
- user_id: int
-
-@app.post("/api/data/import_opml")
-async def api_import_opml(
- import_request: OPMLImportRequest,
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == import_request.user_id or is_web_key:
- # Start the import process in the background
- background_tasks.add_task(process_opml_import, import_request, database_type)
- return {"success": True, "message": "Import process started"}
- else:
- raise HTTPException(status_code=403, detail="You can only import podcasts for yourself!")
-
-
-@contextmanager
-def get_db_connection():
- connection = None
- try:
- connection = create_database_connection()
- yield connection
- finally:
- if connection:
- close_database_connection(connection)
-
-def process_opml_import(import_request: OPMLImportRequest, database_type):
- total_podcasts = len(import_request.podcasts)
- database_functions.import_progress.import_progress_manager.start_import(import_request.user_id, total_podcasts)
- for index, podcast_url in enumerate(import_request.podcasts, start=1):
- try:
- with get_db_connection() as cnx:
- podcast_values = database_functions.app_functions.get_podcast_values(podcast_url, import_request.user_id, None, None, False)
- database_functions.functions.add_podcast(cnx, database_type, podcast_values, import_request.user_id, 30)
- database_functions.import_progress.import_progress_manager.update_progress(import_request.user_id, index, podcast_url)
- except Exception as e:
- print(f"Error importing podcast {podcast_url}: {str(e)}")
- # Add a small delay to allow other requests to be processed
- time.sleep(0.1)
- database_functions.import_progress.import_progress_manager.clear_progress(import_request.user_id)
-
-class PodcastFeedData(BaseModel):
- podcast_feed: str
-
-@app.get("/api/data/fetch_podcast_feed")
-async def fetch_podcast_feed(podcast_feed: str = Query(...), cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions")
-
- # Define headers that mimic a standard web browser
- headers = {
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
- "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
- "Accept-Language": "en-US,en;q=0.5",
- "Connection": "keep-alive",
- "Upgrade-Insecure-Requests": "1",
- "Cache-Control": "max-age=0"
- }
-
- # Fetch the podcast feed data using httpx with browser-like headers
- try:
- async with httpx.AsyncClient(follow_redirects=True, timeout=30.0) as client:
- response = await client.get(podcast_feed, headers=headers)
- response.raise_for_status() # Will raise an httpx.HTTPStatusError for 4XX/5XX responses
- return Response(content=response.content, media_type="application/xml")
- except httpx.HTTPStatusError as e:
- # Add more detailed error logging
- error_message = f"HTTP error fetching podcast feed: {str(e)}"
- logging.error(error_message)
- raise HTTPException(status_code=e.response.status_code,
- detail=f"Failed to fetch podcast feed: {e.response.reason_phrase}")
- except httpx.RequestError as e:
- # Handle request errors (network issues, etc.)
- error_message = f"Request error fetching podcast feed: {str(e)}"
- logging.error(error_message)
- raise HTTPException(status_code=500, detail="Failed to fetch podcast feed due to network or connection issues")
- except Exception as e:
- # Catch-all for other unexpected errors
- error_message = f"Unexpected error fetching podcast feed: {str(e)}"
- logging.error(error_message)
- raise HTTPException(status_code=500, detail="Unexpected error occurred while fetching the podcast feed")
-
-NAMESPACE = {'podcast': 'https://podcastindex.org/namespace/1.0'}
-
-async def fetch_feed(feed_url: str) -> str:
- async with httpx.AsyncClient(follow_redirects=True) as client:
- response = await client.get(feed_url)
- response.raise_for_status()
- return response.text
-
-async def fetch_json(url: str) -> Optional[dict]:
- async with httpx.AsyncClient(follow_redirects=True) as client:
- response = await client.get(url)
- response.raise_for_status()
- return response.json()
-
-def parse_chapters(feed_content: str, audio_url: str) -> List[Dict[str, Optional[str]]]:
- chapters = []
- try:
- root = ET.fromstring(feed_content)
- episodes = root.findall('.//item')
- for episode in episodes:
- enclosure_element = episode.find('enclosure')
- enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None
- if enclosure_element is not None and enclosure_url == audio_url:
- chapters_element = episode.find('podcast:chapters', NAMESPACE)
- if chapters_element is not None:
- chapters_url = chapters_element.attrib.get('url')
- if chapters_url:
- return chapters_url # Return the chapters URL to fetch the JSON
- else:
- print(f"Chapter element with missing URL: {ET.tostring(chapters_element, encoding='unicode')}")
- break # Exit loop once the matching episode is found
- except ET.ParseError as e:
- print(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return chapters
-
-def parse_transcripts(feed_content: str, audio_url: str) -> List[Dict[str, Optional[str]]]:
- transcripts = []
- try:
- root = ET.fromstring(feed_content)
- episodes = root.findall('.//item')
- for episode in episodes:
- enclosure_element = episode.find('enclosure')
- enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None
- if enclosure_element is not None and enclosure_url == audio_url:
- transcript_elements = episode.findall('podcast:transcript', NAMESPACE)
- for transcript_element in transcript_elements:
- transcript_url = transcript_element.attrib.get('url')
- transcript_type = transcript_element.attrib.get('type')
- transcript_language = transcript_element.attrib.get('language')
- transcript_rel = transcript_element.attrib.get('rel')
- transcripts.append({
- "url": transcript_url,
- "mime_type": transcript_type,
- "language": transcript_language,
- "rel": transcript_rel
- })
- break # Exit loop once the matching episode is found
- except ET.ParseError as e:
- print(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return transcripts
-
-
-class TTLCache:
- def __init__(self, maxsize: int = 1000, ttl: int = 3600):
- self.maxsize = maxsize
- self.ttl = ttl
- self.cache: Dict[Tuple, Tuple[Any, float]] = {}
-
- async def get_or_set(self, key: Tuple, callback: Callable):
- current_time = time.time()
-
- # Check if key exists and hasn't expired
- if key in self.cache:
- result, timestamp = self.cache[key]
- if current_time - timestamp < self.ttl:
- return result
-
- # If we get here, either key doesn't exist or has expired
- try:
- # Await the callback here
- result = await callback()
-
- # Store new result
- self.cache[key] = (result, current_time)
-
- # Enforce maxsize by removing oldest entries
- if len(self.cache) > self.maxsize:
- oldest_key = min(self.cache.keys(), key=lambda k: self.cache[k][1])
- del self.cache[oldest_key]
-
- return result
- except Exception as e:
- logging.error(f"Error in cache callback: {e}")
- raise
-
-def async_ttl_cache(maxsize: int = 1000, ttl: int = 3600):
- cache = TTLCache(maxsize=maxsize, ttl=ttl)
-
- def decorator(func):
- @wraps(func)
- async def wrapper(*args, **kwargs):
- # Create a cache key from the function arguments
- key = (func.__name__, args, frozenset(kwargs.items()))
-
- try:
- # Create an async callback
- async def callback():
- return await func(*args, **kwargs)
-
- return await cache.get_or_set(key, callback)
- except Exception as e:
- logging.error(f"Error in cached function {func.__name__}: {e}")
- # Fall back to calling the function directly
- return await func(*args, **kwargs)
-
- return wrapper
- return decorator
-
-@async_ttl_cache(maxsize=1000, ttl=3600)
-async def get_podpeople_hosts(podcast_index_id: int) -> List[Dict[str, Optional[str]]]:
- try:
- async with httpx.AsyncClient(timeout=5.0) as client:
- url = f"{people_url}/api/hosts/{podcast_index_id}"
- response = await client.get(url)
- response.raise_for_status()
- hosts_data = response.json()
-
- if hosts_data:
- return [{
- "name": host.get("name"),
- "role": host.get("role", "Host"),
- "group": None,
- "img": host.get("img"),
- "href": host.get("link"),
- "description": host.get("description")
- } for host in hosts_data]
- except Exception as e:
- logging.error(f"Error fetching hosts: {e}")
-
- return []
-
-async def parse_people(feed_content: str, audio_url: Optional[str] = None, podcast_index_id: Optional[int] = None) -> List[Dict[str, Optional[str]]]:
- people = []
- try:
- root = ET.fromstring(feed_content)
- if audio_url:
- # Look for episode-specific people
- episodes = root.findall('.//item')
- for episode in episodes:
- enclosure_element = episode.find('enclosure')
- enclosure_url = enclosure_element.attrib.get('url') if enclosure_element is not None else None
- if enclosure_element is not None and enclosure_url == audio_url:
- person_elements = episode.findall('podcast:person', NAMESPACE)
- if person_elements:
- for person_element in person_elements:
- people.append({
- "name": person_element.text,
- "role": person_element.attrib.get('role'),
- "group": person_element.attrib.get('group'),
- "img": person_element.attrib.get('img'),
- "href": person_element.attrib.get('href'),
- })
- break
-
- if not people:
- # Fall back to channel-wide people
- person_elements = root.findall('.//channel/podcast:person', NAMESPACE)
- for person_element in person_elements:
- people.append({
- "name": person_element.text,
- "role": person_element.attrib.get('role'),
- "group": person_element.attrib.get('group'),
- "img": person_element.attrib.get('img'),
- "href": person_element.attrib.get('href'),
- })
- except ET.ParseError as e:
- logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}")
-
- # If no people found in the feed, fall back to podpeople_db
- if not people and podcast_index_id:
- # Use the async version
- people = await get_podpeople_hosts(podcast_index_id)
-
- return people
-
-@app.get("/api/data/fetch_podcasting_2_data")
-async def fetch_podcasting_2_data(
- episode_id: int,
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions")
-
- try:
- # Get all the metadata
- episode_metadata = database_functions.functions.get_episode_metadata(database_type, cnx, episode_id, user_id)
- podcast_id = database_functions.functions.get_podcast_id_from_episode(cnx, database_type, episode_id, user_id)
- podcast_feed = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id)
-
- episode_url = episode_metadata['episodeurl']
- podcast_feed_url = podcast_feed['feedurl']
- podcast_index_id = database_functions.functions.get_podcast_index_id(cnx, database_type, podcast_id)
-
- # Set up common request parameters
- headers = {
- 'User-Agent': 'PinePods/1.0',
- 'Accept': 'application/xml, application/rss+xml, text/xml, application/json'
- }
-
- # Check if podcast requires authentication
- auth = None
- if podcast_feed.get('username') and podcast_feed.get('password'):
- auth = httpx.BasicAuth(
- username=podcast_feed['username'],
- password=podcast_feed['password']
- )
-
- # Fetch feed content with authentication if needed
- async with httpx.AsyncClient(timeout=10.0, follow_redirects=True) as client:
- try:
- response = await client.get(
- podcast_feed_url,
- headers=headers,
- auth=auth
- )
- response.raise_for_status()
- feed_content = response.text
- except httpx.HTTPStatusError as e:
- if e.response.status_code == 401:
- logging.error(f"Authentication failed for podcast feed: {podcast_feed_url}")
- raise HTTPException(
- status_code=401,
- detail="Authentication required or invalid credentials for podcast feed"
- )
- raise
-
- # Parse feed content
- chapters_url = parse_chapters(feed_content, episode_url)
- transcripts = parse_transcripts(feed_content, episode_url)
- people = await parse_people(feed_content, episode_url, podcast_index_id)
-
- # Get chapters if available
- chapters_data = []
- if chapters_url:
- try:
- async with httpx.AsyncClient(timeout=5.0, follow_redirects=True) as client:
- # Use same auth for chapters if it's from the same domain
- chapters_auth = auth if chapters_url.startswith(podcast_feed_url) else None
- chapters_headers = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
- 'Accept': 'application/json, text/javascript, */*; q=0.01',
- 'Accept-Language': 'en-US,en;q=0.9',
- 'Referer': podcast_feed_url
- }
- response = await client.get(
- chapters_url,
- headers=chapters_headers,
- auth=chapters_auth
- )
- response.raise_for_status()
- chapters_data = response.json().get('chapters', [])
- except Exception as e:
- logging.error(f"Error fetching chapters: {e}")
- # Continue with empty chapters rather than failing completely
-
- return {
- "chapters": chapters_data,
- "transcripts": transcripts,
- "people": people
- }
-
- except httpx.HTTPStatusError as e:
- logging.error(f"HTTP error in fetch_podcasting_2_data: {e}")
- raise HTTPException(
- status_code=e.response.status_code,
- detail=f"Error fetching podcast data: {str(e)}"
- )
- except httpx.RequestError as e:
- logging.error(f"Request error in fetch_podcasting_2_data: {e}")
- raise HTTPException(
- status_code=500,
- detail=f"Failed to fetch podcast data: {str(e)}"
- )
- except Exception as e:
- logging.error(f"Error in fetch_podcasting_2_data: {e}")
- # Return partial data if we have it
- if any(var in locals() for var in ['chapters_data', 'transcripts', 'people']):
- return {
- "chapters": locals().get('chapters_data', []),
- "transcripts": locals().get('transcripts', []),
- "people": locals().get('people', [])
- }
- raise HTTPException(status_code=500, detail=str(e))
-
-def is_valid_image_url(url: str) -> bool:
- """Validate image URL for security"""
- parsed = urlparse(url)
- # Check if URL is absolute and uses http(s)
- if not parsed.scheme or parsed.scheme not in ('http', 'https'):
- return False
- return True
-
-@app.get("/api/proxy/image")
-async def proxy_image(
- url: str = Query(..., description="URL of the image to proxy")
-):
- logging.info(f"Image proxy request received for URL: {url}")
-
- if not is_valid_image_url(url):
- logging.error(f"Invalid image URL: {url}")
- raise HTTPException(status_code=400, detail="Invalid image URL")
-
- try:
- async with httpx.AsyncClient(follow_redirects=True) as client:
- logging.info(f"Fetching image from: {url}")
- response = await client.get(url, timeout=10.0)
- logging.info(f"Image fetch response status: {response.status_code}")
- logging.info(f"Response headers: {response.headers}")
-
- response.raise_for_status()
-
- content_type = response.headers.get("Content-Type", "")
- logging.info(f"Content type: {content_type}")
-
- if not content_type.startswith(("image/", "application/octet-stream")):
- logging.error(f"Invalid content type: {content_type}")
- raise HTTPException(status_code=400, detail="URL does not point to an image")
-
- headers = {
- "Content-Type": content_type,
- "Cache-Control": "public, max-age=86400",
- "Access-Control-Allow-Origin": "*",
- "X-Content-Type-Options": "nosniff"
- }
- logging.info("Returning image response")
-
- return StreamingResponse(
- response.aiter_bytes(),
- headers=headers,
- media_type=content_type
- )
- except Exception as e:
- logging.error(f"Error in image proxy: {str(e)}")
- raise HTTPException(status_code=500, detail=str(e))
-
-
-def parse_podroll(feed_content: str) -> List[Dict[str, Optional[str]]]:
- podroll = []
- try:
- root = ET.fromstring(feed_content)
- podroll_element = root.find('.//channel/podcast:podroll', NAMESPACE)
- if podroll_element is not None:
- for remote_item in podroll_element.findall('podcast:remoteItem', NAMESPACE):
- podroll.append({
- "feed_guid": remote_item.attrib.get('feedGuid')
- })
- except ET.ParseError as e:
- logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return podroll
-
-def parse_funding(feed_content: str) -> List[Dict[str, Optional[str]]]:
- funding = []
- try:
- root = ET.fromstring(feed_content)
- funding_elements = root.findall('.//channel/podcast:funding', NAMESPACE)
- for funding_element in funding_elements:
- funding.append({
- "url": funding_element.attrib.get('url'),
- "description": funding_element.text
- })
- except ET.ParseError as e:
- logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return funding
-
-def parse_value(feed_content: str) -> List[Dict[str, Optional[str]]]:
- value = []
- try:
- root = ET.fromstring(feed_content)
- value_elements = root.findall('.//channel/podcast:value', NAMESPACE)
- for value_element in value_elements:
- value_recipients = []
- for recipient in value_element.findall('podcast:valueRecipient', NAMESPACE):
- value_recipients.append({
- "name": recipient.attrib.get('name'),
- "type": recipient.attrib.get('type'),
- "address": recipient.attrib.get('address'),
- "split": recipient.attrib.get('split')
- })
- value.append({
- "type": value_element.attrib.get('type'),
- "method": value_element.attrib.get('method'),
- "suggested": value_element.attrib.get('suggested'),
- "recipients": value_recipients
- })
- except ET.ParseError as e:
- logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return value
-
-def parse_hosts(feed_content: str) -> List[Dict[str, Optional[str]]]:
- people = []
- try:
- root = ET.fromstring(feed_content)
- person_elements = root.findall('.//channel/podcast:person', NAMESPACE)
- for person_element in person_elements:
- role = person_element.attrib.get('role', 'host').lower()
- if role == 'host':
- people.append({
- "name": person_element.text,
- "role": role,
- "group": person_element.attrib.get('group'),
- "img": person_element.attrib.get('img'),
- "href": person_element.attrib.get('href')
- })
- except ET.ParseError as e:
- logging.error(f"XML parsing error: {e} - Content: {feed_content[:200]}") # Log the error and first 200 characters of content
- return people
-
-async def get_podcast_hosts(cnx, database_type, podcast_id, feed_content, podcast_index_id):
- # First, try to parse hosts from the feed content
- hosts = parse_hosts(feed_content)
-
- # If no hosts found, try podpeople_db
- if not hosts:
- if podcast_index_id:
- hosts = await get_podpeople_hosts(podcast_index_id)
-
- # If still no hosts found, return a default host
- if not hosts:
- hosts = [{
- "name": "Unknown Host",
- "role": "Host",
- "description": "No host information available.",
- "img": None,
- "href": None
- }]
-
- return hosts
-
-@app.get("/api/data/fetch_podcasting_2_pod_data")
-async def fetch_podcasting_2_pod_data(podcast_id: int, user_id: int, cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions")
-
- # Fetch the podcast details including auth credentials
- podcast_feed = database_functions.functions.get_podcast_details(database_type, cnx, user_id, podcast_id)
- podcast_feed_url = podcast_feed['feedurl']
-
- # Set up HTTP client with authentication if credentials exist
- async with httpx.AsyncClient(follow_redirects=True) as client:
- headers = {
- 'User-Agent': 'PinePods/1.0',
- 'Accept': 'application/xml, application/rss+xml, text/xml'
- }
-
- # Check if podcast requires authentication
- auth = None
- if podcast_feed.get('username') and podcast_feed.get('password'):
- auth = httpx.BasicAuth(
- username=podcast_feed['username'],
- password=podcast_feed['password']
- )
-
- try:
- response = await client.get(
- podcast_feed_url,
- headers=headers,
- auth=auth,
- timeout=30.0 # Add reasonable timeout
- )
- response.raise_for_status()
- feed_content = response.text
-
- logging.info(f"Successfully fetched feed content from {podcast_feed_url}")
-
- # Parse the feed content for various metadata
- people = await get_podcast_hosts(cnx, database_type, podcast_id, feed_content, podcast_feed['podcastindexid'])
- podroll = parse_podroll(feed_content)
- funding = parse_funding(feed_content)
- value = parse_value(feed_content)
-
- logging.debug(f"Parsed metadata - People: {len(people) if people else 0} entries")
-
- return {
- "people": people,
- "podroll": podroll,
- "funding": funding,
- "value": value
- }
-
- except httpx.HTTPStatusError as e:
- if e.response.status_code == 401:
- logging.error(f"Authentication failed for podcast feed: {podcast_feed_url}")
- raise HTTPException(
- status_code=401,
- detail="Authentication required or invalid credentials for podcast feed"
- )
- raise HTTPException(
- status_code=e.response.status_code,
- detail=f"Error fetching podcast feed: {str(e)}"
- )
- except httpx.RequestError as e:
- logging.error(f"Request error fetching podcast feed: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"Failed to fetch podcast feed: {str(e)}"
- )
- except Exception as e:
- logging.error(f"Unexpected error processing podcast feed: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"Error processing podcast feed: {str(e)}"
- )
-
-
-class PodcastResponse(BaseModel):
- podcastid: int
- podcastname: str
- feedurl: str
-
-class PodPeopleResponse(BaseModel):
- success: bool
- podcasts: List[PodcastResponse]
-
-@app.get("/api/data/podpeople/host_podcasts")
-async def get_host_podcasts(
- hostname: str,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """
- Get podcasts associated with a host from the podpeople database.
- """
- # Verify API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key or insufficient permissions")
-
- try:
- # Make request to podpeople database
- async with httpx.AsyncClient(follow_redirects=True) as client:
- logging.info(f"Making request to {people_url}/api/hostsearch?name={hostname}")
- response = await client.get(
- f"{people_url}/api/hostsearch", # Changed this line to match working endpoint
- params={"name": hostname}
- )
- response.raise_for_status()
- podpeople_data = response.json()
-
- logging.info(f"Received response from podpeople: {podpeople_data}")
-
- # Transform the podpeople response into our expected format
- podcasts = []
- if podpeople_data.get("success") and podpeople_data.get("podcasts"):
- for podcast in podpeople_data["podcasts"]:
- podcasts.append({
- 'podcastid': podcast['id'],
- 'podcastname': podcast['title'],
- 'feedurl': podcast['feed_url']
- })
-
- logging.info(f"Transformed response: {podcasts}")
-
- return PodPeopleResponse(
- success=True,
- podcasts=podcasts
- )
-
- except httpx.HTTPStatusError as e:
- logging.error(f"HTTP error from podpeople: {str(e)}")
- raise HTTPException(
- status_code=e.response.status_code,
- detail=f"Error from podpeople service: {str(e)}"
- )
- except httpx.RequestError as e:
- logging.error(f"Error connecting to podpeople: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"Error connecting to podpeople service: {str(e)}"
- )
- except Exception as e:
- logging.error(f"Unexpected error: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"Unexpected error: {str(e)}"
- )
-
-@app.post("/api/data/check_episode_playback")
-async def api_check_episode_playback(
- user_id: int = Form(...),
- episode_title: Optional[str] = Form(None),
- episode_url: Optional[str] = Form(None),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- logging.info(f"Received: user_id={user_id}, episode_title={episode_title}, episode_url={episode_url}")
-
- has_playback, listen_duration = database_functions.functions.check_episode_playback(
- cnx, database_type, user_id, episode_title, episode_url
- )
- if has_playback:
- logging.info("Playback found, listen_duration={}".format(listen_duration))
- return {"has_playback": True, "listen_duration": listen_duration}
- else:
- logging.info("No playback found")
- return {"has_playback": False, "listen_duration": 0}
- else:
- raise HTTPException(status_code=403,
- detail="You can only check playback for yourself!")
-
-
-@app.get("/api/data/user_details_id/{user_id}")
-async def api_get_user_details_id(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
- result = database_functions.functions.get_user_details_id(cnx, database_type, user_id)
- if result:
- return result
- else:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
-@app.get("/api/data/get_theme/{user_id}")
-async def api_get_theme(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- theme = database_functions.functions.get_theme(cnx, database_type, user_id)
- return {"theme": theme}
- else:
- raise HTTPException(status_code=403,
- detail="You can only get themes for yourself!")
-
-
-class PodcastValuesModel(BaseModel):
- pod_title: str
- pod_artwork: str
- pod_author: str
- categories: dict
- pod_description: str
- pod_episode_count: int
- pod_feed_url: str
- pod_website: str
- pod_explicit: bool
- user_id: int
-
-class AddPodcastRequest(BaseModel):
- podcast_values: PodcastValuesModel
- podcast_index_id: int = Field(default=0)
-
-@app.post("/api/data/add_podcast")
-async def api_add_podcast(
- request: AddPodcastRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == request.podcast_values.user_id or is_web_key:
- if database_functions.functions.check_gpodder_settings(database_type, cnx, request.podcast_values.user_id):
- gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, request.podcast_values.user_id)
- gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, request.podcast_values.user_id)
-
- if gpod_type == "gpodder":
- default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, request.podcast_values.user_id)
- device_name = default_device["name"] if default_device else f"pinepods-internal-{request.podcast_values.user_id}"
-
- if gpod_type == "nextcloud":
- database_functions.functions.add_podcast_to_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, request.podcast_values.pod_feed_url)
- else:
- database_functions.functions.add_podcast_to_opodsync(cnx, database_type, request.podcast_values.user_id, gpodder_url, gpodder_login, gpodder_token, request.podcast_values.pod_feed_url, device_name)
-
- result = database_functions.functions.add_podcast(
- cnx,
- database_type,
- request.podcast_values.dict(),
- request.podcast_values.user_id,
- 30,
- podcast_index_id=request.podcast_index_id
- )
-
- if isinstance(result, tuple):
- podcast_id, first_episode_id = result
- else:
- podcast_id = result
- first_episode_id = None # Or fetch it if needed
-
- if podcast_id:
- return {"success": True, "podcast_id": podcast_id, "first_episode_id": first_episode_id}
- else:
- raise HTTPException(status_code=403,
- detail="You can only add podcasts for yourself!")
-
-@app.post("/api/data/enable_disable_guest")
-async def api_enable_disable_guest(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- database_functions.functions.enable_disable_guest(cnx, database_type)
- return {"success": True}
-
-
-@app.post("/api/data/enable_disable_downloads")
-async def api_enable_disable_downloads(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- database_functions.functions.enable_disable_downloads(cnx, database_type)
- return {"success": True}
-
-
-@app.post("/api/data/enable_disable_self_service")
-async def api_enable_disable_self_service(is_admin: bool = Depends(check_if_admin),
- cnx=Depends(get_database_connection)):
- database_functions.functions.enable_disable_self_service(cnx, database_type)
- return {"success": True}
-
-
-@app.get("/api/data/self_service_status")
-async def api_self_service_status(cnx=Depends(get_database_connection)):
- status = database_functions.functions.self_service_status(cnx, database_type)
- # Return status directly without wrapping it in another dict
- return status # Instead of {"status": status}
-
-class FirstAdminRequest(BaseModel):
- username: str
- password: str
- email: str
- fullname: str
-
-
-
-@app.post("/api/data/create_first")
-async def create_first_admin(
- request: FirstAdminRequest,
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection)
-):
- if database_functions.functions.check_admin_exists(cnx, database_type):
- raise HTTPException(
- status_code=403,
- detail="An admin user already exists"
- )
- try:
- user_id = database_functions.functions.add_admin_user(
- cnx,
- database_type,
- (request.fullname, request.username.lower(), request.email, request.password)
- )
-
- background_tasks.add_task(run_startup_tasks_background)
- return {"message": "Admin user created successfully", "user_id": user_id}
- except Exception as e:
- raise HTTPException(
- status_code=500,
- detail=str(e)
- )
-
-def run_startup_tasks_background():
- cnx = create_database_connection()
- try:
- with open("/tmp/web_api_key.txt", "r") as f:
- web_key = f.read().strip()
- init_request = InitRequest(api_key=web_key)
- # Execute startup tasks directly instead of calling the endpoint
- is_valid = database_functions.functions.verify_api_key(cnx, database_type, web_key)
- is_web_key = web_key == base_webkey.web_key
- if not is_valid or not is_web_key:
- raise Exception("Invalid web key")
- database_functions.functions.add_news_feed_if_not_added(database_type, cnx)
- except Exception as e:
- logger.error(f"Background startup tasks failed: {e}")
- finally:
- close_database_connection(cnx)
-
-@app.put("/api/data/increment_listen_time/{user_id}")
-async def api_increment_listen_time(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user, or it's the web API key
- if key_id == user_id or is_web_key:
- database_functions.functions.increment_listen_time(cnx, database_type, user_id)
- return {"detail": "Listen time incremented."}
- else:
- raise HTTPException(status_code=403,
- detail="You can only increment your own listen time.")
-
-
-@app.put("/api/data/increment_played/{user_id}")
-async def api_increment_played(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- database_functions.functions.increment_played(cnx, database_type, user_id)
- return {"detail": "Played count incremented."}
- else:
- raise HTTPException(status_code=403,
- detail="You can only increment your own play count.")
-
-
-class RecordHistoryData(BaseModel):
- episode_id: int
- user_id: int
- episode_pos: float
- is_youtube: bool = False # Default to False for backward compatibility
-
-@app.post("/api/data/record_podcast_history")
-async def api_record_podcast_history(data: RecordHistoryData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- database_functions.functions.record_podcast_history(
- cnx,
- database_type,
- data.episode_id,
- data.user_id,
- data.episode_pos,
- data.is_youtube
- )
- return {"detail": "History recorded successfully."}
- else:
- raise HTTPException(status_code=403,
- detail="You can only record history for yourself!")
-
-
-class GetEpisodeIdRequest(BaseModel):
- podcast_id: int
- user_id: int
- is_youtube: bool = False # Add default False
-
-
-@app.post("/api/data/get_episode_id")
-async def api_get_episode_id(data: GetEpisodeIdRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- episode_id = database_functions.functions.get_first_episode_id(
- cnx,
- database_type,
- data.podcast_id,
- data.user_id,
- data.is_youtube
- )
-
- if episode_id is None:
- raise HTTPException(status_code=404, detail="No episodes found for this podcast.")
- return {"episode_id": episode_id}
-
-
-
-class DownloadPodcastData(BaseModel):
- episode_id: int
- user_id: int
- is_youtube: bool = False # Default to False for backward compatibility
-
-@app.post("/api/data/download_podcast")
-async def api_download_podcast(
- data: DownloadPodcastData,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """
- Queue a single episode or YouTube video for download.
- This uses the Celery task queue to handle the download asynchronously.
- """
- # Validate API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
- # Check permissions
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id != data.user_id and not is_web_key:
- raise HTTPException(
- status_code=403,
- detail="You can only download content for yourself!"
- )
- try:
- # Check if already downloaded
- is_downloaded = database_functions.functions.check_downloaded(
- cnx,
- database_type,
- data.user_id,
- data.episode_id,
- data.is_youtube
- )
- if is_downloaded:
- return {"detail": "Content already downloaded."}
- # Queue the appropriate download task
- if data.is_youtube:
- task = database_functions.tasks.download_youtube_video_task.delay(data.episode_id, data.user_id, database_type)
- content_type = "YouTube video"
- else:
- task = database_functions.tasks.download_podcast_task.delay(data.episode_id, data.user_id, database_type)
- content_type = "Podcast episode"
- return {
- "detail": f"{content_type} download has been queued and will process in the background.",
- "task_id": task.id
- }
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error queueing download: {str(e)}"
- )
-
-def download_content_fun(episode_id: int, user_id: int, is_youtube: bool):
- cnx = create_database_connection()
- try:
- if is_youtube:
- database_functions.functions.download_youtube_video(cnx, database_type, episode_id, user_id)
- else:
- database_functions.functions.download_podcast(cnx, database_type, episode_id, user_id)
- finally:
- cnx.close()
-
-
-class DownloadAllPodcastData(BaseModel):
- podcast_id: int
- user_id: int
- is_youtube: bool = False
-
-# Updated API endpoint using Celery for mass downloads
-@app.post("/api/data/download_all_podcast")
-async def api_download_all_podcast(
- data: DownloadAllPodcastData,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """
- Queue all episodes of a podcast or videos of a YouTube channel for download.
- Uses a Celery task queue to process downloads in the background without blocking the server.
- """
- # Validate API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check permissions
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id != data.user_id and not is_web_key:
- raise HTTPException(
- status_code=403,
- detail="You can only download content for yourself!"
- )
-
- try:
- # Verify the podcast/channel exists
- if data.is_youtube:
- # Check if channel exists
- videos = database_functions.functions.get_video_ids_for_podcast(
- cnx, database_type, data.podcast_id
- )
- if not videos:
- return {"detail": "No videos found for the given YouTube channel."}
- else:
- # Check if podcast exists
- episodes = database_functions.functions.get_episode_ids_for_podcast(
- cnx, database_type, data.podcast_id
- )
- if not episodes:
- return {"detail": "No episodes found for the given podcast."}
-
- # Queue the download task using Celery
- task = database_functions.tasks.queue_podcast_downloads.delay(
- data.podcast_id,
- data.user_id,
- database_type,
- data.is_youtube
- )
-
- return {
- "detail": f"{'YouTube channel' if data.is_youtube else 'Podcast'} download has been queued. "
- "Episodes will be downloaded in the background.",
- "task_id": task.id
- }
-
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error queueing downloads: {str(e)}"
- )
-
-@app.get("/api/data/download_status/{user_id}")
-async def api_download_status(
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """
- Get the status of all active downloads for a user.
- """
- # Validate API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check permissions
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id != user_id and not is_web_key:
- raise HTTPException(
- status_code=403,
- detail="You can only view your own downloads!"
- )
-
- try:
- # Get all active downloads for the user
- downloads = database_functions.tasks.download_manager.get_user_downloads(user_id)
-
- # Return the downloads
- return {
- "downloads": downloads,
- "count": len(downloads)
- }
-
- except Exception as e:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail=f"Error retrieving download status: {str(e)}"
- )
-
-class DeletePodcastData(BaseModel):
- episode_id: int
- user_id: int
- is_youtube: bool = False # Default to False for backward compatibility
-
-@app.post("/api/data/delete_episode")
-async def api_delete_podcast(data: DeletePodcastData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- database_functions.functions.delete_episode(database_type, cnx, data.episode_id,
- data.user_id, data.is_youtube)
- return {"detail": "Episode(s) Deleted"}
- else:
- raise HTTPException(status_code=403,
- detail="You can only delete content for yourself!")
-
-class MarkEpisodeCompletedData(BaseModel):
- episode_id: int
- user_id: int
- is_youtube: bool = False # Added field with default False
-
-@app.post("/api/data/mark_episode_completed")
-async def api_mark_episode_completed(data: MarkEpisodeCompletedData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- database_functions.functions.mark_episode_completed(
- cnx,
- database_type,
- data.episode_id,
- data.user_id,
- data.is_youtube
- )
- return {"detail": "Episode marked as completed."}
- else:
- raise HTTPException(status_code=403,
- detail="You can only mark episodes as completed for yourself.")
-
-@app.post("/api/data/mark_episode_uncompleted")
-async def api_mark_episode_uncompleted(data: MarkEpisodeCompletedData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- database_functions.functions.mark_episode_uncompleted(
- cnx,
- database_type,
- data.episode_id,
- data.user_id,
- data.is_youtube
- )
- return {"detail": "Episode marked as uncompleted."}
- else:
- raise HTTPException(status_code=403,
- detail="You can only mark episodes as uncompleted for yourself.")
-
-class AutoDownloadRequest(BaseModel):
- podcast_id: int
- auto_download: bool
- user_id: int
-
-@app.post("/api/data/enable_auto_download")
-async def api_enable_auto_download(data: AutoDownloadRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id:
- database_functions.functions.enable_auto_download(cnx, database_type, data.podcast_id, data.user_id, data.auto_download)
- return {"detail": "Auto-download status updated."}
- else:
- raise HTTPException(status_code=403, detail="You can only modify your own podcasts.")
-
-class AutoDownloadStatusRequest(BaseModel):
- podcast_id: int
- user_id: int
-
-class AutoDownloadStatusResponse(BaseModel):
- auto_download: bool
-
-@app.post("/api/data/get_auto_download_status")
-async def api_get_auto_download_status(data: AutoDownloadStatusRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id != data.user_id:
- raise HTTPException(status_code=403, detail="You can only get the status for your own podcast.")
-
- status = database_functions.functions.call_get_auto_download_status(cnx, database_type, data.podcast_id, data.user_id)
- if status is None:
- raise HTTPException(status_code=404, detail="Podcast not found")
-
- return AutoDownloadStatusResponse(auto_download=status)
-
-class SkipTimesRequest(BaseModel):
- podcast_id: int
- start_skip: Optional[int] = 0
- end_skip: Optional[int] = 0
- user_id: int
-
-@app.post("/api/data/adjust_skip_times")
-async def api_adjust_skip_times(data: SkipTimesRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- database_functions.functions.adjust_skip_times(cnx, database_type, data.podcast_id, data.start_skip, data.end_skip)
- return {"detail": "Skip times updated."}
- else:
- raise HTTPException(status_code=403, detail="You can only modify your own podcasts.")
-
-class AutoSkipTimesRequest(BaseModel):
- podcast_id: int
- user_id: int
-
-class AutoSkipTimesResponse(BaseModel):
- start_skip: int
- end_skip: int
-
-@app.post("/api/data/get_auto_skip_times")
-async def api_get_auto_skip_times(data: AutoSkipTimesRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id != data.user_id:
- raise HTTPException(status_code=403, detail="You can only get the skip times for your own podcast.")
-
- start_skip, end_skip = database_functions.functions.get_auto_skip_times(cnx, database_type, data.podcast_id, data.user_id)
- if start_skip is None or end_skip is None:
- raise HTTPException(status_code=404, detail="Podcast not found")
-
- return AutoSkipTimesResponse(start_skip=start_skip, end_skip=end_skip)
-
-class PlayEpisodeDetailsRequest(BaseModel):
- podcast_id: int
- user_id: int
- is_youtube: bool = False
-
-class PlayEpisodeDetailsResponse(BaseModel):
- playback_speed: float
- start_skip: int
- end_skip: int
-
-@app.post("/api/data/get_play_episode_details")
-async def api_get_play_episode_details(data: PlayEpisodeDetailsRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- # Get all details in one function call
- playback_speed, start_skip, end_skip = database_functions.functions.get_play_episode_details(
- cnx,
- database_type,
- data.user_id,
- data.podcast_id,
- data.is_youtube
- )
-
- return PlayEpisodeDetailsResponse(
- playback_speed=playback_speed,
- start_skip=start_skip,
- end_skip=end_skip
- )
- else:
- raise HTTPException(status_code=403,
- detail="You can only get metadata for yourself!")
-
-class ClearPlaybackSpeedRequest(BaseModel):
- podcast_id: int
- user_id: int
-
-@app.post("/api/data/clear_podcast_playback_speed")
-async def api_clear_podcast_playback_speed(data: ClearPlaybackSpeedRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id != data.user_id:
- raise HTTPException(status_code=403,
- detail="You can only modify your own podcast settings!")
-
- success = database_functions.functions.clear_podcast_playback_speed(
- cnx,
- database_type,
- data.podcast_id,
- data.user_id
- )
-
- if success:
- return {"message": "Playback speed cleared successfully"}
- else:
- raise HTTPException(status_code=500, detail="Failed to clear playback speed")
-
-class SetPlaybackSpeedPodcast(BaseModel):
- user_id: int
- podcast_id: int
- playback_speed: float
-
-class SetPlaybackSpeedUser(BaseModel):
- user_id: int
- playback_speed: float
-
-@app.post("/api/data/podcast/set_playback_speed")
-async def api_set_playback_speed_podcast(data: SetPlaybackSpeedPodcast, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == data.user_id or is_web_key:
- database_functions.functions.set_playback_speed_podcast(cnx, database_type, data.podcast_id, data.playback_speed)
- return {"detail": "Default podcast playback speed updated."}
- else:
- raise HTTPException(status_code=403, detail="You can only modify your own podcasts.")
-
-@app.post("/api/data/user/set_playback_speed")
-async def api_set_playback_speed_user(data: SetPlaybackSpeedUser, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == data.user_id or is_web_key:
- database_functions.functions.set_playback_speed_user(cnx, database_type, data.user_id, data.playback_speed)
- return {"detail": "Default playback speed updated."}
- else:
- raise HTTPException(status_code=403, detail="You can only modify your own settings.")
-
-
-class SaveEpisodeData(BaseModel):
- episode_id: int
- user_id: int
- is_youtube: bool = False
-
-@app.post("/api/data/save_episode")
-async def api_save_episode(data: SaveEpisodeData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- ep_status = database_functions.functions.check_saved(
- cnx, database_type, data.user_id, data.episode_id, data.is_youtube
- )
- if ep_status:
- return {"detail": f"{'Video' if data.is_youtube else 'Episode'} already saved."}
- else:
- success = database_functions.functions.save_episode(
- cnx, database_type, data.episode_id, data.user_id, data.is_youtube
- )
- if success:
- return {"detail": f"{'Video' if data.is_youtube else 'Episode'} saved!"}
- else:
- raise HTTPException(status_code=400, detail=f"Error saving {'video' if data.is_youtube else 'episode'}.")
- else:
- raise HTTPException(status_code=403,
- detail=f"You can only save {'videos' if data.is_youtube else 'episodes'} of your own!")
-
-class RemoveSavedEpisodeData(BaseModel):
- episode_id: int
- user_id: int
- is_youtube: bool = False
-
-@app.post("/api/data/remove_saved_episode")
-async def api_remove_saved_episode(data: RemoveSavedEpisodeData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if is_valid_key:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == data.user_id:
- database_functions.functions.remove_saved_episode(
- cnx, database_type, data.episode_id, data.user_id, data.is_youtube
- )
- return {"detail": f"Saved {'video' if data.is_youtube else 'episode'} removed."}
- else:
- raise HTTPException(status_code=403,
- detail=f"You can only remove {'videos' if data.is_youtube else 'episodes'} of your own!")
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-
-class AddCategoryData(BaseModel):
- podcast_id: int
- user_id: int
- category: str
-
-@app.post("/api/data/add_category")
-async def api_add_category(data: AddCategoryData, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- existing_categories = database_functions.functions.get_categories(cnx, database_type, data.podcast_id, data.user_id)
- if data.category in existing_categories:
- return {"detail": "Category already exists."}
- else:
- success = database_functions.functions.add_category(cnx, database_type, data.podcast_id, data.user_id, data.category)
- if success:
- return {"detail": "Category added!"}
- else:
- raise HTTPException(status_code=400, detail="Error adding category.")
- else:
- raise HTTPException(status_code=403, detail="You can only modify categories of your own podcasts!")
-
-class RemoveCategoryData(BaseModel):
- podcast_id: int
- user_id: int
- category: str
-
@app.post("/api/data/remove_category")
async def api_remove_category(data: RemoveCategoryData, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Remove a category from one of the user's podcasts."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Note: unlike the add endpoint, there is no web-key bypass here —
    # only the key's own user may remove a category.
    owner_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if owner_id != data.user_id:
        raise HTTPException(status_code=403,
                            detail="You can only modify categories of your own podcasts!")

    database_functions.functions.remove_category(cnx, database_type, data.podcast_id, data.user_id, data.category)
    return {"detail": "Category removed."}
-
class UpdateFeedCutoffDaysData(BaseModel):
    """Request body for POST /api/data/update_feed_cutoff_days."""
    podcast_id: int       # podcast whose cutoff is being changed
    user_id: int          # owner of the podcast
    feed_cutoff_days: int # how many days of feed history to keep
-
@app.post("/api/data/update_feed_cutoff_days")
async def api_update_feed_cutoff_days(data: UpdateFeedCutoffDaysData, cnx=Depends(get_database_connection),
                                      api_key: str = Depends(get_api_key_from_header)):
    """Update the feed cutoff (in days) for a user's podcast."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    # Permit the action for the key owner or the web key.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only modify settings of your own podcasts!")

    updated = database_functions.functions.update_feed_cutoff_days(
        cnx, database_type, data.podcast_id, data.user_id, data.feed_cutoff_days)
    if not updated:
        raise HTTPException(status_code=400, detail="Error updating feed cutoff days.")
    return {"detail": "Feed cutoff days updated successfully!"}
-
@app.get("/api/data/get_feed_cutoff_days")
async def api_get_feed_cutoff_days(podcast_id: int, user_id: int, cnx=Depends(get_database_connection),
                                   api_key: str = Depends(get_api_key_from_header)):
    """Return the feed cutoff days for a user's podcast."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    # Permit the action for the key owner or the web key.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only access settings of your own podcasts!")

    cutoff = database_functions.functions.get_feed_cutoff_days(cnx, database_type, podcast_id, user_id)
    if cutoff is None:
        raise HTTPException(status_code=404, detail="Podcast not found or does not belong to the user.")
    return {"podcast_id": podcast_id, "user_id": user_id, "feed_cutoff_days": cutoff}
-
class TogglePodcastNotificationData(BaseModel):
    """Request body for PUT /api/data/podcast/toggle_notifications."""
    user_id: int     # owner of the podcast
    podcast_id: int  # podcast whose notifications are toggled
    enabled: bool    # desired notification state
-
@app.put("/api/data/podcast/toggle_notifications")
async def api_toggle_podcast_notifications(
    data: TogglePodcastNotificationData,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Enable or disable new-episode notifications for one podcast."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only modify your own podcast settings")

    toggled = database_functions.functions.toggle_podcast_notifications(
        cnx, database_type, data.podcast_id, data.user_id, data.enabled
    )
    if not toggled:
        raise HTTPException(status_code=400, detail="Error updating podcast notification settings")
    return {"detail": "Podcast notification settings updated successfully"}
-
class SetPodcastFeedCutoff(BaseModel):
    """Request body for PUT /api/data/podcast/set_feed_cutoff."""
    user_id: int      # owner of the podcast
    podcast_id: int   # podcast whose cutoff is being set
    feed_cutoff: int  # cutoff value passed through to set_feed_cutoff
-
@app.put("/api/data/podcast/set_feed_cutoff")
async def api_set_podcast_feed_cutoff(
    data: SetPodcastFeedCutoff,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Set the feed cutoff value for a user's podcast.

    Renamed from ``api_toggle_podcast_notifications``: the old name duplicated
    the notification-toggle endpoint's function defined earlier in this module,
    so the second ``def`` silently shadowed the first at import time. The HTTP
    route is unchanged.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == data.user_id or is_web_key:
        success = database_functions.functions.set_feed_cutoff(
            cnx,
            database_type,
            data.podcast_id,
            data.user_id,
            data.feed_cutoff
        )
        if success:
            return {"detail": "Podcast feed cutoff setting updated successfully"}
        else:
            # Fixed typo: previously read "Error updating podcast nfeed cutoff".
            raise HTTPException(status_code=400, detail="Error updating podcast feed cutoff")
    else:
        raise HTTPException(status_code=403, detail="You can only modify your own podcast settings")
-
class PodcastNotificationStatusData(BaseModel):
    """Request body for POST /api/data/podcast/notification_status."""
    user_id: int     # owner of the podcast
    podcast_id: int  # podcast whose notification status is queried
-
@app.post("/api/data/podcast/notification_status")
async def api_get_podcast_notification_status(
    data: PodcastNotificationStatusData,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Report whether notifications are enabled for one of the user's podcasts."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only check your own podcast settings")

    status_flag = database_functions.functions.get_podcast_notification_status(
        cnx, database_type, data.podcast_id, data.user_id
    )
    return {"enabled": status_flag}
-
class NotificationSettingsData(BaseModel):
    """Request body for PUT /api/data/user/notification_settings.

    The optional fields now carry explicit ``= None`` defaults so they are
    genuinely optional: under Pydantic v2 a bare ``Optional[str]`` annotation
    without a default is still a *required* field, which forced clients to
    send keys for every platform even when updating only one.
    """
    user_id: int
    platform: str   # presumably "ntfy" or "gotify" given the fields below — confirm against consumers
    enabled: bool
    ntfy_topic: Optional[str] = None
    ntfy_server_url: Optional[str] = None
    gotify_url: Optional[str] = None
    gotify_token: Optional[str] = None
-
@app.get("/api/data/user/notification_settings")
async def api_get_notification_settings(user_id: int, cnx=Depends(get_database_connection),
                                        api_key: str = Depends(get_api_key_from_header)):
    """Return the user's notification settings."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only access your own notification settings")

    current = database_functions.functions.get_notification_settings(cnx, database_type, user_id)
    return {"settings": current}
-
@app.put("/api/data/user/notification_settings")
async def api_update_notification_settings(data: NotificationSettingsData, cnx=Depends(get_database_connection),
                                           api_key: str = Depends(get_api_key_from_header)):
    """Persist the user's notification settings (platform, toggles, endpoints)."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only modify your own notification settings")

    saved = database_functions.functions.update_notification_settings(
        cnx,
        database_type,
        data.user_id,
        data.platform,
        data.enabled,
        data.ntfy_topic,
        data.ntfy_server_url,
        data.gotify_url,
        data.gotify_token
    )
    if not saved:
        raise HTTPException(status_code=400, detail="Error updating notification settings")
    return {"detail": "Notification settings updated successfully"}
-
class NotificationTestRequest(BaseModel):
    """Request body for POST /api/data/user/test_notification."""
    user_id: int   # account to send the test notification for
    platform: str  # notification platform to exercise
-
@app.post("/api/data/user/test_notification")
async def api_test_notification(
    data: NotificationTestRequest,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Send a test notification to the user's configured platform."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Invalid API key")

    # Key owner or web key only.
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if caller_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only send test notifications to your own account")

    sent = database_functions.functions.send_test_notification(
        cnx, database_type, data.user_id, data.platform
    )
    if not sent:
        raise HTTPException(status_code=400, detail="Error sending test notification")
    return {"detail": "Test notification sent successfully"}
-
class RecordListenDurationData(BaseModel):
    """Request body for POST /api/data/record_listen_duration."""
    episode_id: int          # episode being listened to; 0 is ignored by the endpoint
    user_id: int             # listener
    listen_duration: float   # seconds listened (unit assumed — confirm against clients)
    is_youtube: Optional[bool] = False  # routes to the YouTube-specific recorder when true
-
-
@app.post("/api/data/record_listen_duration")
async def api_record_listen_duration(data: RecordListenDurationData, cnx=Depends(get_database_connection),
                                     api_key: str = Depends(get_api_key_from_header)):
    """Record how long a user listened to an episode (or YouTube video).

    Renamed from the non-descriptive ``get`` — a name that collides with any
    other module-level ``get`` and mislabels a POST handler. The HTTP route is
    unchanged, so clients are unaffected.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Ignore listen duration for episodes with ID 0
    if data.episode_id == 0:
        return {"detail": "Listen duration for episode ID 0 is ignored."}

    # Key owner or web key only.
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    if key_id == data.user_id or is_web_key:
        # YouTube videos are tracked in a separate table/function.
        if data.is_youtube:
            database_functions.functions.record_youtube_listen_duration(cnx, database_type, data.episode_id, data.user_id, data.listen_duration)
        else:
            database_functions.functions.record_listen_duration(cnx, database_type, data.episode_id, data.user_id, data.listen_duration)
        return {"detail": "Listen duration recorded."}
    else:
        raise HTTPException(status_code=403, detail="You can only record your own listen duration")
-
-
@app.get("/api/data/refresh_pods")
async def api_refresh_pods(background_tasks: BackgroundTasks, is_admin: bool = Depends(check_if_admin)):
    """Kick off a background refresh of all podcasts (admin only via dependency)."""
    background_tasks.add_task(refresh_pods_task)
    return {"detail": "Refresh initiated."}
-
def refresh_pods_task():
    """Open a dedicated DB connection, refresh every podcast, and always close it."""
    connection = create_database_connection()
    try:
        database_functions.functions.refresh_pods(connection, database_type)
    finally:
        # Release the connection even if the refresh raises.
        close_database_connection(connection)
-
# Store locks per user to prevent concurrent refresh jobs.
# Maps user_id -> Lock; an entry exists only while a refresh job is running
# (see websocket_endpoint, which creates and deletes entries).
user_locks = {}

# Store active WebSocket connections.
# Maps user_id -> list of WebSocket objects that receive progress/new-episode
# messages during a refresh.
active_websockets = {}
-
@app.websocket("/ws/api/data/episodes/{user_id}")
async def websocket_endpoint(websocket: WebSocket, user_id: int, cnx=Depends(get_database_connection), nextcloud_refresh: bool = Query(False), api_key: str = Query(None)):
    """Run a podcast refresh for one user over a WebSocket, streaming progress.

    Flow: accept the socket, validate the API key (query param), require the
    key to belong to `user_id` (or be the web key), reject if a refresh is
    already running for this user, then launch `run_refresh_process` as an
    asyncio task while polling the socket to detect client disconnects.
    Cleanup in `finally` releases the per-user lock, unregisters the socket
    from `active_websockets`, and closes the DB connection.
    """
    await websocket.accept()
    try:
        print(f"User {user_id} connected to WebSocket")
        # Validate the API key
        is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
        if not is_valid_key:
            await websocket.send_json({"detail": "Invalid API key or insufficient permissions"})
            await websocket.close()
            return
        # Authorization: only the key's own user or the web key may refresh.
        is_web_key = api_key == base_webkey.web_key
        key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        print(f"User ID: {user_id}, Key ID: {key_id}, Web Key: {is_web_key}")
        if key_id != user_id and not is_web_key:
            await websocket.send_json({"detail": "You can only refresh your own podcasts"})
            await websocket.close()
            return
        # A lock entry means a refresh is already in flight for this user.
        if user_id in user_locks:
            await websocket.send_json({"detail": "Refresh job already running for this user."})
            await websocket.close()
            return
        if user_id not in active_websockets:
            active_websockets[user_id] = []
        print(f"Active WebSockets: {active_websockets}")
        active_websockets[user_id].append(websocket)
        # Create a lock for the user and start the refresh task
        user_locks[user_id] = Lock()
        try:
            # Acquire the lock
            # NOTE(review): this appears to be a synchronous Lock acquired on the
            # event loop; it never blocks here because the entry was just created,
            # but confirm `Lock` is not an asyncio.Lock (acquire() would return a
            # coroutine).
            user_locks[user_id].acquire()
            print(f"Acquired lock for user {user_id}")
            # Run the refresh process asynchronously without blocking the WebSocket
            task = asyncio.create_task(run_refresh_process(user_id, nextcloud_refresh, websocket, cnx))
            print(f"Task created for user {user_id}")
            # Keep the WebSocket connection alive while the task is running
            while not task.done():
                try:
                    await asyncio.wait_for(websocket.receive_text(), timeout=1.0)
                except asyncio.TimeoutError:
                    # This is expected, we're just using it to keep the connection alive
                    pass
                except Exception as e:
                    # Any other receive error means the client went away.
                    print(f"WebSocket disconnected: {str(e)}. Cancelling task.")
                    task.cancel()
                    break
        except Exception as e:
            await websocket.send_json({"detail": f"Error: {str(e)}"})
        finally:
            # Always release the lock and clean up
            user_locks[user_id].release()
            del user_locks[user_id]
            if user_id in active_websockets:
                active_websockets[user_id].remove(websocket)
                if not active_websockets[user_id]:
                    del active_websockets[user_id]
            # For the WebSocket dependency, use the proper function
            close_database_connection(cnx)
            await websocket.close()
    except Exception as e:
        # Handle any unexpected errors
        # NOTE(review): if the socket is already closed, these sends/closes may
        # themselves raise — verify against the server framework's semantics.
        await websocket.send_json({"detail": f"Unexpected error: {str(e)}"})
        await websocket.close()
-
async def run_refresh_process(user_id, nextcloud_refresh, websocket, cnx):
    """Refresh every podcast belonging to `user_id`, streaming progress JSON.

    Steps: count the user's podcasts and emit an initial progress message;
    resolve the default gpodder device; optionally sync Nextcloud/gpodder
    subscriptions; then refresh each podcast (YouTube channels via
    `process_youtube_videos`, regular feeds via `add_episodes`), pushing
    per-podcast progress and any new episodes to all of the user's active
    WebSockets. The `finally` block closes `cnx` with driver-specific
    handling and forces garbage collection.
    """
    print("Starting refresh process")
    print(f"Running refresh process for user in job {user_id}")
    try:
        # First get total count of podcasts
        cursor = cnx.cursor()
        if database_type == "postgresql":
            cursor.execute('''
                SELECT COUNT(*), array_agg("podcastname")
                FROM "Podcasts"
                WHERE "userid" = %s
            ''', (user_id,))
        else:
            cursor.execute('''
                SELECT COUNT(*), GROUP_CONCAT(PodcastName)
                FROM Podcasts
                WHERE UserID = %s
            ''', (user_id,))
        count_result = cursor.fetchone()
        # Handle both dictionary and tuple results
        if isinstance(count_result, dict):
            total_podcasts = count_result['count'] if count_result else 0
        else:
            total_podcasts = count_result[0] if count_result else 0
        # Initial progress message so the client can render a bar immediately.
        await websocket.send_json({
            "progress": {
                "current": 0,
                "total": total_podcasts,
                "current_podcast": ""
            }
        })

        # Get default device information for sync
        default_device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id)
        default_device_name = None

        if default_device_id:
            # Get the device name
            device_cursor = cnx.cursor()
            if database_type == "postgresql":
                device_query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s'
            else:
                device_query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s"

            device_cursor.execute(device_query, (default_device_id,))
            device_result = device_cursor.fetchone()
            device_cursor.close()

            if device_result:
                # Row may be a tuple or a dict depending on the cursor type.
                default_device_name = device_result[0] if isinstance(device_result, tuple) else device_result["devicename"]
                print(f"Using default device for sync: {default_device_name} (ID: {default_device_id})")
            else:
                print("Default device ID found but no name - will use automatic fallback")
        else:
            print("No default device found - will use automatic fallback")

        if nextcloud_refresh:
            await websocket.send_json({"detail": "Refreshing Nextcloud subscriptions..."})
            print(f"Refreshing Nextcloud subscriptions for user {user_id}")
            gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, user_id)
            pod_sync_type = database_functions.functions.get_gpodder_type(cnx, database_type, user_id)
            # Sync runs in a worker thread so the event loop stays responsive.
            if pod_sync_type == "nextcloud":
                await asyncio.to_thread(database_functions.functions.refresh_nextcloud_subscription,
                    database_type, cnx, user_id, gpodder_url, gpodder_token, gpodder_login, pod_sync_type,
                    default_device_id, default_device_name, False)
            else:
                await asyncio.to_thread(database_functions.functions.refresh_gpodder_subscription,
                    database_type, cnx, user_id, gpodder_url, gpodder_token, gpodder_login, pod_sync_type,
                    default_device_id, default_device_name, False)
            await websocket.send_json({"detail": "Pod Sync subscription refresh complete."})
        # Get list of podcast names for progress updates
        print('Getting list')
        if database_type == "postgresql":
            cursor.execute('''
                SELECT "podcastid", "podcastname", "feedurl", "artworkurl", "autodownload",
                       "username", "password", "isyoutubechannel", "feedcutoffdays"
                FROM "Podcasts"
                WHERE "userid" = %s
            ''', (user_id,))
        else:
            cursor.execute('''
                SELECT PodcastID, PodcastName, FeedURL, ArtworkURL, AutoDownload,
                       Username, Password, IsYouTubeChannel, FeedCutoffDays
                FROM Podcasts
                WHERE UserID = %s
            ''', (user_id,))
        podcasts = cursor.fetchall()
        print('got list')

        # Process each podcast
        current = 0
        for podcast in podcasts:
            current += 1
            # Rows may be dicts (keyed per-backend) or plain tuples.
            if isinstance(podcast, dict):
                if database_type == "postgresql":
                    podcast_id = podcast['podcastid']
                    podcast_name = podcast['podcastname']
                    feed_url = podcast['feedurl']
                    artwork_url = podcast['artworkurl']
                    auto_download = podcast['autodownload']
                    username = podcast['username']
                    password = podcast['password']
                    is_youtube = podcast['isyoutubechannel']
                    feed_cutoff = podcast['feedcutoffdays']
                else:
                    podcast_id = podcast['PodcastID']
                    podcast_name = podcast['PodcastName']
                    feed_url = podcast['FeedURL']
                    artwork_url = podcast['ArtworkURL']
                    auto_download = podcast['AutoDownload']
                    username = podcast['Username']
                    password = podcast['Password']
                    is_youtube = podcast['IsYouTubeChannel']
                    feed_cutoff = podcast['FeedCutoffDays']
            else:
                podcast_id, podcast_name, feed_url, artwork_url, auto_download, username, password, is_youtube, feed_cutoff = podcast

            await websocket.send_json({
                "progress": {
                    "current": current,
                    "total": total_podcasts,
                    "current_podcast": podcast_name
                }
            })

            # Refresh this podcast
            # print(f'is it youtube?: {is_youtube}')
            try:
                if is_youtube is True:
                    # Extract channel ID from feed URL
                    channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url
                    channel_id = channel_id.split('/')[0].split('?')[0]
                    youtube_episodes = await asyncio.to_thread(
                        database_functions.youtube.process_youtube_videos,
                        database_type,
                        podcast_id,
                        channel_id,
                        cnx,
                        feed_cutoff
                    )
                    # Fan out new videos to every socket this user has open.
                    if youtube_episodes:
                        for episode in youtube_episodes:
                            if user_id in active_websockets:
                                for ws in active_websockets[user_id]:
                                    await ws.send_json({"new_episode": episode})
                else:
                    episodes = await asyncio.to_thread(
                        database_functions.functions.add_episodes,
                        cnx,
                        database_type,
                        podcast_id,
                        feed_url,
                        artwork_url,
                        auto_download,
                        username,
                        password,
                        True  # websocket
                    )

                    # Fan out new episodes to every socket this user has open.
                    if episodes:
                        for episode in episodes:
                            if user_id in active_websockets:
                                for ws in active_websockets[user_id]:
                                    await ws.send_json({"new_episode": episode})
            except Exception as e:
                # One bad feed must not abort the whole refresh run.
                print(f"Error refreshing podcast {podcast_id}: {str(e)}")
                continue

    except Exception as e:
        await websocket.send_json({"detail": f"Error during refresh: {e}"})
    finally:
        # Clear explicit reference
        if cnx:
            try:
                # Get connection type
                connection_type = type(cnx).__name__
                print(f"Closing connection of type: {connection_type}")

                # For PooledMySQLConnection
                if connection_type == "PooledMySQLConnection":
                    print("Detected PooledMySQLConnection - using special handling")
                    # DO NOTHING - don't try to close or modify it
                    # Just let it go out of scope and be garbage collected
                    pass
                # Regular MySQL connection
                elif "MySQL" in connection_type:
                    print("Detected MySQL connection - using basic close")
                    try:
                        cnx.close()
                    except Exception as e:
                        print(f"MySQL close error (ignored): {e}")
                # PostgreSQL connection
                elif hasattr(cnx, 'closed'):
                    print("Detected PostgreSQL connection")
                    if not cnx.closed:
                        cnx.close()
                # Generic
                elif hasattr(cnx, 'close'):
                    print("Using generic close method")
                    cnx.close()

                print("Connection handling complete")
            except Exception as e:
                print(f"Connection handling error: {e}")

        # Force drop reference regardless of what happened above
        cnx = None

        # Force garbage collection
        import gc
        gc.collect()
        print("Garbage collection complete")
-
@app.get("/api/data/get_stats")
async def api_get_stats(user_id: int, cnx=Depends(get_database_connection),
                        api_key: str = Depends(get_api_key_from_header)):
    """Return listening statistics for the given user."""
    logging.info('Fetching API key')
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    # Key owner or web key only.
    web_key_used = api_key == base_webkey.web_key
    logging.info('Getting key ID')
    logger.info(f'id {user_id}')
    caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f'Got key ID: {caller_id}')

    if caller_id != user_id and not web_key_used:
        raise HTTPException(status_code=403, detail="You can only get stats for your own account.")

    stats = database_functions.functions.get_stats(cnx, database_type, user_id)
    logging.info('Got stats')
    if stats is None:
        raise HTTPException(status_code=404, detail="Stats not found for the given user ID")
    return stats
-
-
-
@app.get("/api/data/get_user_episode_count")
async def api_get_user_episode_count(user_id: int, cnx=Depends(get_database_connection),
                                     api_key: str = Depends(get_api_key_from_header)):
    """Return the user's episode count.

    Fixes a falsy-zero bug: the old `if episode_count:` returned 404 for a
    legitimate count of 0. Now only a `None` result (assumed to mean the user
    does not exist — TODO confirm get_user_episode_count returns None for
    unknown users) yields 404.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)

    if not is_valid_key:
        logging.error(f"not valid key")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    elevated_access = await has_elevated_access(api_key, cnx)

    if not elevated_access:
        # Non-admin keys may only query their own count.
        user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

        if user_id != user_id_from_api_key:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access these user details")
    episode_count = database_functions.functions.get_user_episode_count(cnx, database_type, user_id)
    if episode_count is not None:
        return episode_count
    else:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
@app.get("/api/data/get_user_info")
async def api_get_user_info(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
    """Return info on all users (admin-gated via the check_if_admin dependency)."""
    return database_functions.functions.get_user_info(database_type, cnx)
-
@app.get("/api/data/my_user_info/{user_id}")
async def api_get_my_user_info(
    user_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Return the account details for the requesting user."""
    try:
        if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
            raise HTTPException(
                status_code=403,
                detail="Your API key is either invalid or does not have correct permission"
            )

        # The web key may fetch any user's info; everyone else only their own.
        owner_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if user_id != owner_id and api_key != base_webkey.web_key:
            raise HTTPException(
                status_code=403,
                detail="You can only retrieve your own user information!"
            )

        info = database_functions.functions.get_my_user_info(database_type, cnx, user_id)
        if not info:
            raise HTTPException(status_code=404, detail="User not found")
        return info

    except HTTPException:
        # Let deliberate HTTP errors pass through untouched.
        raise
    except Exception as e:
        logging.error(f"Error in api_get_my_user_info: {str(e)}")
        raise HTTPException(status_code=500, detail="An error occurred while retrieving user information")
-
@app.get("/api/data/check_podcast", response_model=Dict[str, bool])
async def api_check_podcast(
    user_id: int,
    podcast_name: str,
    podcast_url: str,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Report whether the user already has the named podcast."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
    found = database_functions.functions.check_podcast(cnx, database_type, user_id, podcast_name, podcast_url)
    return {"exists": found}
-
@app.get("/api/data/check_youtube_channel", response_model=Dict[str, bool])
async def api_check_youtube_channel(
    user_id: int,
    channel_name: str,
    channel_url: str,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Report whether the user already follows the named YouTube channel."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )
    found = database_functions.functions.check_youtube_channel(
        cnx, database_type, user_id, channel_name, channel_url
    )
    return {"exists": found}
-
-
@app.get("/api/data/user_admin_check/{user_id}")
async def api_user_admin_check_route(user_id: int, api_key: str = Depends(get_api_key_from_header),
                                     cnx=Depends(get_database_connection)):
    """Check whether the given user has admin rights."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Non-elevated keys may only query their own admin status.
    if not await has_elevated_access(api_key, cnx):
        requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if user_id != requester_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to check admin status for other users")

    # The DB call is synchronous, so run it off the event loop.
    admin_flag = await run_in_threadpool(database_functions.functions.user_admin_check, cnx, database_type, user_id)
    return {"is_admin": admin_flag}
-
class RemoveYouTubeChannelData(BaseModel):
    """Request body for POST /api/data/remove_youtube_channel."""
    user_id: int       # subscriber whose channel is being removed
    channel_name: str  # channel display name
    channel_url: str   # channel URL used to locate the subscription
-
@app.post("/api/data/remove_youtube_channel")
async def api_remove_youtube_channel_route(
    data: RemoveYouTubeChannelData = Body(...),
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
):
    """Unsubscribe a user from a YouTube channel identified by name and URL."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    # Non-elevated keys may only act on their own subscriptions.
    if not await has_elevated_access(api_key, cnx):
        requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if data.user_id != requester_id:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="You are not authorized to remove channels for other users"
            )

    database_functions.functions.remove_youtube_channel_by_url(
        cnx, database_type, data.channel_name, data.channel_url, data.user_id
    )
    return {"success": True}
-
class RemovePodcastData(BaseModel):
    """Request body for POST /api/data/remove_podcast."""
    user_id: int       # subscriber removing the podcast
    podcast_name: str  # podcast display name
    podcast_url: str   # feed URL used together with the name to find the podcast
-
-
@app.post("/api/data/remove_podcast")
async def api_remove_podcast_route(data: RemovePodcastData = Body(...), cnx=Depends(get_database_connection),
                                   api_key: str = Depends(get_api_key_from_header)):
    """Remove a podcast (or YouTube channel) subscription by name and URL.

    If the user has gpodder/Nextcloud sync configured, the removal is also
    propagated to the sync server first; for opodsync, the sync call may
    report that it already handled episode cleanup, in which case the local
    remove step is skipped.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")
    elevated_access = await has_elevated_access(api_key, cnx)
    if not elevated_access:
        # Get user ID from API key
        user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if data.user_id != user_id_from_api_key:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to remove podcasts for other users")

    # First, get the podcast ID and check if it's a YouTube channel
    podcast_id = database_functions.functions.get_podcast_id(database_type, cnx, data.user_id, data.podcast_url, data.podcast_name)

    if podcast_id is None:
        raise HTTPException(status_code=404, detail="Podcast not found")

    # Check if this is a YouTube channel
    is_youtube = database_functions.functions.check_youtube_channel_id(cnx, database_type, podcast_id)

    # Track if episodes have been handled
    episodes_handled = False

    if database_functions.functions.check_gpodder_settings(database_type, cnx, data.user_id):
        logging.info('get cloud vals')
        gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, data.user_id)

        # Get the full gpodder settings to check URL
        gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, data.user_id)

        logging.info('em cloud')
        podcast_feed = database_functions.functions.get_podcast_feed_by_id(cnx, database_type, podcast_id)
        gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, data.user_id)

        # Get the correct device name, matching what we do in add_podcast
        device_name = f"pinepods-internal-{data.user_id}"  # Default device name
        if gpod_type == "gpodder":
            default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, data.user_id)
            if default_device:
                device_name = default_device["name"]

        if gpod_type == "nextcloud":
            database_functions.functions.remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, podcast_feed)
        else:
            # Modified return value includes whether episodes were handled
            success, episodes_handled = database_functions.functions.remove_podcast_from_opodsync(
                cnx, database_type, data.user_id, gpodder_url, gpodder_login,
                gpodder_token, podcast_feed, device_name
            )

    # Only run the appropriate remove function if episodes weren't already handled by gpodder sync
    if not episodes_handled:
        if is_youtube:
            database_functions.functions.remove_youtube_channel(cnx, database_type, podcast_id, data.user_id)
        else:
            database_functions.functions.remove_podcast(cnx, database_type, data.podcast_name, data.podcast_url, data.user_id)
    else:
        logging.info('skipping remove - already handled by gpodder sync')

    return {"success": True}
-
class RemovePodcastIDData(BaseModel):
    """Request body for POST /api/data/remove_podcast_id."""
    user_id: int              # subscriber removing the content
    podcast_id: int           # podcast (or channel) ID to remove
    is_youtube: bool = False  # true routes removal to the YouTube-channel path
-
-@app.post("/api/data/remove_podcast_id")
-async def api_remove_podcast_route_id(data: RemovePodcastIDData = Body(...),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if data.user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to remove content for other users")
- if data.is_youtube:
- database_functions.functions.remove_youtube_channel(cnx, database_type, data.podcast_id, data.user_id)
- else:
- # Existing podcast removal logic
- logging.info('check gpod')
- episodes_handled = False # Track whether episodes were already handled by gpodder sync
-
- if database_functions.functions.check_gpodder_settings(database_type, cnx, data.user_id):
- logging.info('get cloud vals')
- gpodder_url, gpodder_token, gpodder_login = database_functions.functions.get_nextcloud_settings(database_type, cnx, data.user_id)
-
- # Get the full gpodder settings to check URL
- gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, data.user_id)
-
- logging.info('em cloud')
- podcast_feed = database_functions.functions.get_podcast_feed_by_id(cnx, database_type, data.podcast_id)
- gpod_type = database_functions.functions.get_gpodder_type(cnx, database_type, data.user_id)
-
- # Get the correct device name, matching what we do in add_podcast
- device_name = f"pinepods-internal-{data.user_id}" # Default device name
- if gpod_type == "gpodder":
- default_device = database_functions.functions.get_default_gpodder_device(cnx, database_type, data.user_id)
- if default_device:
- device_name = default_device["name"]
-
- if gpod_type == "nextcloud":
- database_functions.functions.remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, gpodder_token, podcast_feed)
- else:
- # Modified return value includes whether episodes were handled
- success, episodes_handled = database_functions.functions.remove_podcast_from_opodsync(
- cnx, database_type, data.user_id, gpodder_url, gpodder_login,
- gpodder_token, podcast_feed, device_name
- )
-
- # Only run remove_podcast_id if episodes weren't already handled by gpodder sync
- if not episodes_handled:
- logging.info('rm pod id')
- database_functions.functions.remove_podcast_id(cnx, database_type, data.podcast_id, data.user_id)
- else:
- logging.info('skipping rm pod id - already handled by gpodder sync')
-
- return {"success": True}
-
-
-@app.get("/api/data/return_pods/{user_id}")
-async def api_return_pods(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- try:
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- pods = database_functions.functions.return_pods(database_type, cnx, user_id)
-
- # Return empty list if no podcasts found
- if not pods:
- return {"pods": []}
-
- # Filter out any None values that might have slipped through
- cleaned_pods = []
- for pod in pods:
- if pod and isinstance(pod, dict):
- cleaned_pod = {
- k: v if v is not None else ""
- for k, v in pod.items()
- }
- cleaned_pods.append(cleaned_pod)
-
- return {"pods": cleaned_pods}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return pods for yourself!")
-
- except Exception as e:
- logging.error(f"Error in api_return_pods: {str(e)}")
- return {"pods": [], "error": "An error occurred while retrieving podcasts"}
-
-@app.get("/api/data/user_history/{user_id}")
-async def api_user_history(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- history = database_functions.functions.user_history(cnx, database_type, user_id)
- return {"data": history}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return history for yourself!")
-
-
-
-@app.get("/api/data/saved_episode_list/{user_id}")
-async def api_saved_episode_list(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- saved_episodes = database_functions.functions.saved_episode_list(database_type, cnx, user_id)
- return {"saved_episodes": saved_episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return saved episodes for yourself!")
-
-
-@app.get("/api/data/download_episode_list")
-async def api_download_episode_list(cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_id: int = Query(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- downloaded_episodes = database_functions.functions.download_episode_list(database_type, cnx, user_id)
- return {"downloaded_episodes": downloaded_episodes}
- else:
- raise HTTPException(status_code=403,
- detail="You can only return downloaded episodes for yourself!")
-
-
-class UserValues(BaseModel):
- fullname: str
- username: str
- email: str
- hash_pw: str
-
-@app.post("/api/data/add_user")
-async def api_add_user(is_admin: bool = Depends(check_if_admin),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- user_values: UserValues = Body(...)):
- try:
- user_id = database_functions.functions.add_user(cnx, database_type, (
- user_values.fullname, user_values.username.lower(), user_values.email, user_values.hash_pw))
-
- if not user_id:
- raise HTTPException(
- status_code=500,
- detail="Failed to create user - no user ID returned"
- )
-
- return {"detail": "Success", "user_id": user_id}
-
- except psycopg.errors.UniqueViolation as e:
- error_detail = str(e)
- if "Users_username_key" in error_detail:
- raise HTTPException(
- status_code=409,
- detail="This username is already taken. Please choose a different username."
- )
- elif "Users_email_key" in error_detail:
- raise HTTPException(
- status_code=409,
- detail="This email address is already registered. Please use a different email."
- )
- else:
- raise HTTPException(
- status_code=409,
- detail="A conflict occurred while creating the user. Please try again with different credentials."
- )
-
- except psycopg.errors.OperationalError as e:
- logging.error(f"Database operational error: {str(e)}")
- raise HTTPException(
- status_code=503,
- detail="Unable to connect to the database. Please try again later."
- )
-
- except mysql.connector.errors.IntegrityError as e:
- error_msg = str(e)
- if "Duplicate entry" in error_msg and "username" in error_msg.lower():
- raise HTTPException(
- status_code=409,
- detail="This username is already taken. Please choose a different username."
- )
- elif "Duplicate entry" in error_msg and "email" in error_msg.lower():
- raise HTTPException(
- status_code=409,
- detail="This email address is already registered. Please use a different email."
- )
- else:
- raise HTTPException(
- status_code=409,
- detail="A conflict occurred while creating the user. Please try again with different credentials."
- )
-
- except Exception as e:
- logging.error(f"Unexpected error adding user: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred while creating the user: {str(e)}"
- )
-
-
-@app.post("/api/data/add_login_user")
-async def api_add_user(cnx=Depends(get_database_connection),
- user_values: UserValues = Body(...)):
- try:
- self_service = database_functions.functions.check_self_service(cnx, database_type)
- if not self_service:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- user_id = database_functions.functions.add_user(cnx, database_type, (
- user_values.fullname, user_values.username.lower(), user_values.email, user_values.hash_pw))
-
- if not user_id:
- raise HTTPException(
- status_code=500,
- detail="Failed to create user account - no user ID returned"
- )
-
- return {"detail": "User added successfully", "user_id": user_id}
-
- except UniqueViolation as e:
- error_detail = str(e)
- if "Users_username_key" in error_detail:
- raise HTTPException(
- status_code=409,
- detail="This username is already taken. Please choose a different username."
- )
- elif "Users_email_key" in error_detail:
- raise HTTPException(
- status_code=409,
- detail="This email address is already registered. Please use a different email."
- )
- else:
- raise HTTPException(
- status_code=409,
- detail="A conflict occurred while creating the user. Please try again with different credentials."
- )
-
- except OperationalError as e:
- logging.error(f"Database operational error: {str(e)}")
- raise HTTPException(
- status_code=503,
- detail="Unable to connect to the database. Please try again later."
- )
-
- except Exception as e:
- logging.error(f"Unexpected error adding user: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred while creating your account: {str(e)}"
- )
-
-@app.put("/api/data/set_fullname/{user_id}")
-async def api_set_fullname(user_id: int, new_name: str = Query(...), cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
- try:
- database_functions.functions.set_fullname(cnx, database_type, user_id, new_name)
- return {"detail": "Fullname updated."}
- except:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
-class PasswordUpdateRequest(BaseModel):
- hash_pw: str
-
-@app.put("/api/data/set_password/{user_id}")
-async def api_set_password(
- user_id: int,
- request: PasswordUpdateRequest, # Use the Pydantic model
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- hash_pw = request.hash_pw # Extract the hash_pw from the request model
-
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="You are not authorized to access these user details")
-
- try:
- database_functions.functions.set_password(cnx, database_type, user_id, hash_pw)
- return {"detail": "Password updated."}
- except Exception as e:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"User not found. Error: {str(e)}")
-
-@app.put("/api/data/user/set_email")
-async def api_set_email(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header),
- user_id: int = Body(...), new_email: str = Body(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
- try:
- database_functions.functions.set_email(cnx, database_type, user_id, new_email)
- return {"detail": "Email updated."}
- except:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
-@app.put("/api/data/user/set_username")
-async def api_set_username(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header),
- user_id: int = Body(...), new_username: str = Body(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
- try:
- database_functions.functions.set_username(cnx, database_type, user_id, new_username.lower())
- return {"detail": "Username updated."}
- except:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
-@app.put("/api/data/user/set_isadmin")
-async def api_set_isadmin(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection),
- user_id: int = Body(...), isadmin: bool = Body(...)):
- database_functions.functions.set_isadmin(cnx, database_type, user_id, isadmin)
- return {"detail": "IsAdmin status updated."}
-
-
-@app.get("/api/data/user/final_admin/{user_id}")
-async def api_final_admin(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection),
- user_id: int = Path(...)):
- is_final_admin = database_functions.functions.final_admin(cnx, database_type, user_id)
- return {"final_admin": is_final_admin}
-
-
-@app.delete("/api/data/user/delete/{user_id}")
-async def api_delete_user(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection),
- user_id: int = Path(...)):
- database_functions.functions.delete_user(cnx, database_type, user_id)
- return {"status": "User deleted"}
-
-
-class OIDCProviderValues(BaseModel):
- provider_name: str
- client_id: str
- client_secret: str
- authorization_url: str
- token_url: str
- user_info_url: str
- button_text: str
- scope: Optional[str] = "openid email profile"
- button_color: Optional[str] = "#000000"
- button_text_color: Optional[str] = "#000000"
- icon_svg: Optional[str] = None
- name_claim: Optional[str] = None
- email_claim: Optional[str] = None
- username_claim: Optional[str] = None
- roles_claim: Optional[str] = None
- user_role: Optional[str] = None
- admin_role: Optional[str] = None
-
-@app.post("/api/data/add_oidc_provider")
-async def api_add_oidc_provider(
- is_admin: bool = Depends(check_if_admin),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- provider_values: OIDCProviderValues = Body(...)):
- try:
- provider_id = database_functions.functions.add_oidc_provider(cnx, database_type, (
- provider_values.provider_name,
- provider_values.client_id,
- provider_values.client_secret,
- provider_values.authorization_url,
- provider_values.token_url,
- provider_values.user_info_url,
- provider_values.button_text,
- provider_values.scope,
- provider_values.button_color,
- provider_values.button_text_color,
- provider_values.icon_svg,
- provider_values.name_claim,
- provider_values.email_claim,
- provider_values.username_claim,
- provider_values.roles_claim,
- provider_values.user_role,
- provider_values.admin_role
- ))
- if not provider_id:
- raise HTTPException(
- status_code=500,
- detail="Failed to create provider - no provider ID returned"
- )
- return {"detail": "Success", "provider_id": provider_id}
- except psycopg.errors.UniqueViolation:
- raise HTTPException(
- status_code=409,
- detail="A provider with this name already exists"
- )
- except Exception as e:
- logging.error(f"Unexpected error adding provider: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred while creating the provider: {str(e)}"
- )
-
-@app.post("/api/data/remove_oidc_provider")
-async def api_remove_oidc_provider(
- is_admin: bool = Depends(check_if_admin),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header),
- provider_id: int = Body(...)):
- try:
- result = database_functions.functions.remove_oidc_provider(cnx, database_type, provider_id)
- if not result:
- raise HTTPException(
- status_code=404,
- detail="Provider not found"
- )
- return {"detail": "Success"}
- except Exception as e:
- logging.error(f"Unexpected error removing provider: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred while removing the provider: {str(e)}"
- )
-
-@app.get("/api/data/list_oidc_providers")
-async def api_list_oidc_providers(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- try:
- providers = database_functions.functions.list_oidc_providers(cnx, database_type)
- return {"providers": providers}
- except Exception as e:
- logging.error(f"Unexpected error listing providers: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred while listing providers: {str(e)}"
- )
-
-# Public reqeust for login info
-@app.get("/api/data/public_oidc_providers")
-async def api_public_oidc_providers(cnx=Depends(get_database_connection)):
- """Get minimal OIDC provider info needed for login screen buttons."""
- try:
- providers = database_functions.functions.get_public_oidc_providers(cnx, database_type)
- return {"providers": providers}
- except Exception as e:
- logging.error(f"Unexpected error getting public provider info: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"An unexpected error occurred: {str(e)}"
- )
-
-
-@app.put("/api/data/user/set_theme")
-async def api_set_theme(user_id: int = Body(...), new_theme: str = Body(...), cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- database_functions.functions.set_theme(cnx, database_type, user_id, new_theme)
- return {"message": "Theme updated successfully"}
- else:
- raise HTTPException(status_code=403,
- detail="You can only set your own theme!")
-
-@app.post("/api/data/create_api_key")
-async def api_create_api_key(
- user_id: int = Body(..., embed=True),
- rssonly: bool = Body(..., embed=True),
- podcast_ids: Optional[List[int]] = Body(None, embed=True),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if user_id == key_id or is_web_key:
- if rssonly:
- new_key = database_functions.functions.create_rss_key(cnx, database_type, user_id, podcast_ids)
- else:
- new_key = database_functions.functions.create_api_key(cnx, database_type, user_id)
- return {"rss_key" if rssonly else "api_key": new_key}
- else:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
-@app.post("/api/data/set_rss_key_podcasts")
-async def api_set_rss_key_podcasts(
- user_id: int = Body(..., embed=True),
- rss_key_id: int = Body(..., embed=True),
- podcast_ids: Optional[List[int]] = Body(None, embed=True),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if user_id == key_id or is_web_key:
- database_functions.functions.set_rss_key_podcasts(cnx, database_type, rss_key_id, podcast_ids)
- return {"message": "Podcast IDs updated successfully"}
- else:
- raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")
-
-class SendTestEmailValues(BaseModel):
- server_name: str
- server_port: str
- from_email: str
- send_mode: str
- encryption: str
- auth_required: bool
- email_username: str
- email_password: str
- to_email: str
- message: str # Add this line
-
-
-def send_email(payload: SendTestEmailValues):
- # This is now a synchronous function
- msg = MIMEMultipart()
- msg['From'] = payload.from_email
- msg['To'] = payload.to_email
- msg['Subject'] = "Test Email"
- msg.attach(MIMEText(payload.message, 'plain'))
- try:
- port = int(payload.server_port) # Convert port to int here
- if payload.encryption == "SSL/TLS":
- server = smtplib.SMTP_SSL(payload.server_name, port)
- else:
- server = smtplib.SMTP(payload.server_name, port)
- if payload.encryption == "StartTLS":
- server.starttls()
- if payload.auth_required:
- server.login(payload.email_username, payload.email_password)
- server.send_message(msg)
- server.quit()
- return "Email sent successfully"
- except Exception as e:
- raise Exception(f"Failed to send email: {str(e)}")
-
-@app.post("/api/data/send_test_email")
-async def api_send_email(payload: SendTestEmailValues, is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
- # Assume API key validation logic here
- try:
- # Use run_in_threadpool to execute the synchronous send_email function
- send_status = await run_in_threadpool(send_email, payload)
- return {"email_status": send_status}
- except Exception as e:
- print(traceback.format_exc()) # Print full exception information
- raise HTTPException(status_code=500, detail=f"Failed to send email: {str(e)}")
-
-class SendEmailValues(BaseModel):
- to_email: str
- subject : str
- message: str # Add this line
-
-def send_email_with_settings(email_values, database_type, payload: SendEmailValues):
-
- try:
- msg = MIMEMultipart()
- msg['From'] = email_values['FromEmail']
- msg['To'] = payload.to_email
- msg['Subject'] = payload.subject
- msg.attach(MIMEText(payload.message, 'plain'))
-
- try:
- port = int(email_values['ServerPort'])
- if email_values['Encryption'] == "SSL/TLS":
- server = smtplib.SMTP_SSL(email_values['ServerName'], port)
- elif email_values['Encryption'] == "StartTLS":
- server = smtplib.SMTP(email_values['ServerName'], port)
- server.starttls()
- else:
- server = smtplib.SMTP(email_values['ServerName'], port)
-
- if email_values['AuthRequired']:
- server.login(email_values['Username'], email_values['Password'])
-
- server.send_message(msg)
- server.quit()
- return "Email sent successfully"
- except Exception as e:
- raise Exception(f"Failed to send email: {str(e)}")
- except Exception as e:
- logging.error(f"Failed to send email: {str(e)}", exc_info=True)
- raise Exception(f"Failed to send email: {str(e)}")
-
-
-@app.post("/api/data/send_email")
-async def api_send_email(payload: SendEmailValues, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- email_values = database_functions.functions.get_email_settings(cnx, database_type)
- if not email_values:
- raise HTTPException(status_code=404, detail="Email settings not found")
-
- try:
- send_status = await run_in_threadpool(send_email_with_settings, email_values, database_type, payload)
- return {"email_status": send_status}
- except Exception as e:
- raise HTTPException(status_code=500, detail=f"Failed to send email: {str(e)}")
-
-
-@app.post("/api/data/save_email_settings")
-async def api_save_email_settings(email_settings: dict = Body(..., embed=True),
- is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- database_functions.functions.save_email_settings(cnx, database_type, email_settings)
- return {"message": "Email settings saved."}
-
-
-@app.get("/api/data/get_encryption_key")
-async def api_get_encryption_key(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- encryption_key = database_functions.functions.get_encryption_key(cnx, database_type)
- return {"encryption_key": encryption_key}
-
-
-@app.get("/api/data/get_email_settings")
-async def api_get_email_settings(is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- email_settings = database_functions.functions.get_email_settings(cnx, database_type)
- return email_settings
-
-
-class DeleteAPIKeyHeaders(BaseModel):
- api_id: str
- user_id: str
-
-
-@app.delete("/api/data/delete_api_key")
-async def api_delete_api_key(payload: DeleteAPIKeyHeaders, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if payload.user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access or remove other users api-keys.")
- # Check if the API key to be deleted is the same as the one used in the current request
- if database_functions.functions.is_same_api_key(cnx, database_type, payload.api_id, api_key):
- raise HTTPException(status_code=403,
- detail="You cannot delete the API key that is currently in use.")
- # Check if the API key belongs to the guest user (user_id 1)
- if database_functions.functions.belongs_to_guest_user(cnx, database_type, payload.api_id):
- raise HTTPException(status_code=403,
- detail="Cannot delete guest user api.")
-
- # Proceed with deletion if the checks pass
- database_functions.functions.delete_api(cnx, database_type, payload.api_id)
- return {"detail": "API key deleted."}
-
-
-@app.get("/api/data/get_api_info/{user_id}")
-async def api_get_api_info(cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header),
- user_id: int = Path(...)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
- api_information = database_functions.functions.get_api_info(database_type, cnx, user_id)
- if api_information:
- return {"api_info": api_information}
- else:
- raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
-
-
-class ResetCodePayload(BaseModel):
- email: str
- username: str
-
-
-class ResetPasswordPayload(BaseModel):
- email: str
- hashed_pw: str
-
-
-@app.post("/api/data/reset_password_create_code")
-async def api_reset_password_route(payload: ResetCodePayload, cnx=Depends(get_database_connection)):
- email_setup = database_functions.functions.get_email_settings(cnx, database_type)
- if email_setup['Server_Name'] == "default_server":
- raise HTTPException(status_code=403,
- detail="Email settings not configured. Please contact your administrator.")
- else:
- check_user = database_functions.functions.check_reset_user(cnx, database_type, payload.username.lower(), payload.email)
- if check_user:
- create_code = database_functions.functions.reset_password_create_code(cnx, database_type, payload.email)
-
- # Create a SendTestEmailValues instance with the email setup values and the password reset code
- email_payload = SendEmailValues(
- to_email=payload.email,
- subject="Pinepods Password Reset Code",
- message=f"Your password reset code is {create_code}"
- )
- # Send the email with the password reset code
- email_send = send_email_with_settings(email_setup, email_payload)
- if email_send:
- return {"code_created": True}
- else:
- database_functions.functions.reset_password_remove_code(cnx, database_type, payload.email)
- raise HTTPException(status_code=500, detail="Failed to send email")
-
- return {"user_exists": user_exists}
- else:
- raise HTTPException(status_code=404, detail="User not found")
-
-class ResetVerifyCodePayload(BaseModel):
- reset_code: str
- email: str
- new_password: str
-
-@app.post("/api/data/verify_and_reset_password")
-async def api_verify_and_reset_password_route(payload: ResetVerifyCodePayload, cnx=Depends(get_database_connection)):
- code_valid = database_functions.functions.verify_reset_code(cnx, database_type, payload.email, payload.reset_code)
- if code_valid is None:
- raise HTTPException(status_code=404, detail="User not found")
- elif not code_valid:
- raise HTTPException(status_code=400, detail="Code is invalid")
- # return {"code_valid": False}
-
- message = database_functions.functions.reset_password_prompt(cnx, database_type, payload.email, payload.new_password)
- if message is None:
- raise HTTPException(status_code=500, detail="Failed to reset password")
- return {"message": message}
-
-class EpisodeMetadata(BaseModel):
- episode_id: int
- user_id: int
- person_episode: bool = False # Default to False if not specified
- is_youtube: bool = False
-
@app.post("/api/data/get_episode_metadata")
async def api_get_episode_metadata(data: EpisodeMetadata, cnx=Depends(get_database_connection),
                                   api_key: str = Depends(get_api_key_from_header)):
    """Return metadata for one episode, restricted to the requesting user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may act on behalf of any user.
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail="You can only get metadata for yourself!")

    episode = database_functions.functions.get_episode_metadata(
        database_type,
        cnx,
        data.episode_id,
        data.user_id,
        data.person_episode,
        data.is_youtube
    )
    return {"episode": episode}
-
class GetPlaybackSpeed(BaseModel):
    """Request body for fetching a user's playback-speed preference."""
    podcast_id: Optional[int] = None  # None = the user's global default speed
    user_id: int  # requesting user; must match the API key owner
-
@app.post("/api/data/get_playback_speed")
async def api_get_playback_speed(data: GetPlaybackSpeed, cnx=Depends(get_database_connection),
                                 api_key: str = Depends(get_api_key_from_header)):
    """Return the stored playback speed for a user, optionally scoped to one podcast.

    Raises 403 for an invalid key or when the key does not belong to
    data.user_id (the web key may act for any user).
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if key_id == data.user_id or is_web_key:
        # is_youtube is not part of the request model; default to podcast speed.
        is_youtube = False
        playback_speed = database_functions.functions.get_playback_speed(
            cnx,            # connection first, per the function definition
            database_type,
            data.user_id,
            is_youtube,
            data.podcast_id
        )
        return {"playback_speed": playback_speed}
    else:
        # Fixed copy-pasted message that previously referred to "metadata".
        raise HTTPException(status_code=403,
                            detail="You can only get playback speed for yourself!")
-
@app.get("/api/data/generate_mfa_secret/{user_id}")
async def generate_mfa_secret(user_id: int, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Generate a fresh TOTP secret for MFA enrollment and return it together
    with a provisioning QR code rendered as SVG. The secret is held in
    temp_mfa_secrets until confirmed via /api/data/verify_temp_mfa."""
    # Perform API key validation and user authorization checks as before
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        logging.warning(f"Invalid API key: {api_key}")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key
    logging.info(f"Is web key: {is_web_key}")

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f"Key ID from API key: {key_id}")

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == user_id or is_web_key:
        user_details = database_functions.functions.get_user_details_id(cnx, database_type, user_id)
        if not user_details:
            raise HTTPException(status_code=404, detail="User not found")

        email = user_details['Email']
        secret = random_base32()  # Correctly generate a random base32 secret
        # Store the secret in temporary storage.
        # NOTE(review): stored as a bare string, but cleanup_temp_mfa_secrets
        # unpacks entries as (secret, timestamp) tuples — the two call sites
        # disagree; confirm and align.
        temp_mfa_secrets[user_id] = secret
        totp = TOTP(secret)
        provisioning_uri = totp.provisioning_uri(name=email, issuer_name="Pinepods")

        # Generate QR code as SVG
        qr = qrcode.QRCode(
            version=1,
            error_correction=qrcode.constants.ERROR_CORRECT_L,
            box_size=10,
            border=4,
        )
        qr.add_data(provisioning_uri)
        qr.make(fit=True)

        # Convert the QR code to an SVG string
        factory = qrcode.image.svg.SvgPathImage
        img = qr.make_image(fill_color="black", back_color="white", image_factory=factory)
        buffered = io.BytesIO()
        img.save(buffered)
        qr_code_svg = buffered.getvalue().decode("utf-8")
        logging.info(f"Generated MFA secret for user {user_id}")

        return {
            "secret": secret,
            "qr_code_svg": qr_code_svg  # Directly return the SVG string
        }
    else:
        logging.warning("Attempted to generate MFA secret for another user")
        raise HTTPException(status_code=403,
                            detail="You can only generate MFA secrets for yourself!")
-
class VerifyTempMFABody(BaseModel):
    """Request body for confirming an in-progress MFA enrollment."""
    user_id: int  # user completing MFA setup
    mfa_code: str  # current TOTP code generated from the temporary secret
-
@app.post("/api/data/verify_temp_mfa")
async def verify_temp_mfa(body: VerifyTempMFABody, cnx=Depends(get_database_connection),
                          api_key: str = Depends(get_api_key_from_header)):
    """Verify a TOTP code against the user's *temporary* MFA secret and,
    on success, persist that secret permanently.

    Raises 404 when no MFA setup is in progress, 403 on key/user mismatch.
    """
    logging.info(f"Verifying MFA code for user_id: {body.user_id} with code: {body.mfa_code}")

    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        logging.warning(f"Invalid API key: {api_key}")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key
    logging.info(f"Is web key: {is_web_key}")

    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f"Key ID from API key: {key_id}")

    if key_id == body.user_id or is_web_key:
        secret = temp_mfa_secrets.get(body.user_id)
        # Single None check: the original had a second, unreachable
        # "if secret: ... else: raise 404" block here, now removed.
        if secret is None:
            logging.warning(f"No secret found for user_id")
            raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                                detail="MFA setup not initiated or expired.")
        logging.info(f"Retrieved secret for user_id")

        totp = TOTP(secret)
        if totp.verify(body.mfa_code):
            try:
                # Attempt to save the MFA secret to permanent storage
                success = database_functions.functions.save_mfa_secret(database_type, cnx, body.user_id, secret)
                if success:
                    # Remove the temporary secret upon successful verification and storage
                    del temp_mfa_secrets[body.user_id]
                    logging.info(f"MFA secret successfully saved for user_id: {body.user_id}")
                    return {"verified": True}
                else:
                    # Handle unsuccessful save attempt (e.g., database error)
                    logging.error("Failed to save MFA secret to database.")
                    logging.error(f"Failed to save MFA secret for user_id: {body.user_id}")
                    return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                                        content={"message": "Failed to save MFA secret. Please try again."})
            except Exception as e:
                logging.error(f"Exception saving MFA secret: {e}")
                return JSONResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                                    content={"message": "An error occurred. Please try again."})
        else:
            return {"verified": False}
    else:
        raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                            detail="You are not authorized to verify MFA for this user.")
-
# Cleanup task for temp_mfa_secrets
async def cleanup_temp_mfa_secrets():
    """Periodically purge temporary MFA setup secrets older than one hour.

    NOTE(review): generate_mfa_secret stores entries as bare secret strings,
    while this task expected (secret, timestamp) tuples — the original unpack
    would raise on the first real entry. Plain entries are now skipped instead
    of crashing the loop; align the two call sites so entries carry timestamps.
    """
    while True:
        # Wait for 1 hour before running cleanup
        await asyncio.sleep(3600)
        current_time = time.time()
        # Remove timestamped entries older than 1 hour; skip untimestamped ones.
        for user_id, entry in list(temp_mfa_secrets.items()):
            if isinstance(entry, tuple) and len(entry) == 2:
                _, created_at = entry
                if current_time - created_at > 3600:
                    del temp_mfa_secrets[user_id]
        logging.info("Cleanup task: Removed expired MFA setup entries.")
-
-
class MfaSecretData(BaseModel):
    """Request body for persisting a user's MFA secret directly."""
    user_id: int  # owner of the secret; must match the API key owner
    mfa_secret: str  # base32 TOTP secret to store
-
-
@app.post("/api/data/save_mfa_secret")
async def api_save_mfa_secret(data: MfaSecretData, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Persist a user's MFA secret; callers may only save their own."""
    logging.info(f"Received request to save MFA secret for user {data.user_id}")

    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        logging.warning(f"Invalid API key: {api_key}")
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    is_web_key = api_key == base_webkey.web_key
    logging.info(f"Is web key: {is_web_key}")
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    logging.info(f"Key ID from API key: {key_id}")

    # Guard clause: only the key owner or the shared web key may proceed.
    if key_id != data.user_id and not is_web_key:
        logging.warning("Attempted to save MFA secret for another user")
        raise HTTPException(status_code=403,
                            detail="You can only save MFA secrets for yourself!")

    if database_functions.functions.save_mfa_secret(database_type, cnx, data.user_id, data.mfa_secret):
        logging.info("MFA secret saved successfully")
        return {"status": "success"}
    logging.error("Failed to save MFA secret")
    return {"status": "error"}
-
@app.get("/api/data/check_mfa_enabled/{user_id}")
async def api_check_mfa_enabled(user_id: int, cnx=Depends(get_database_connection),
                                api_key: str = Depends(get_api_key_from_header)):
    """Report whether MFA is enabled for the given user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Non-admin callers may only inspect their own account.
    if not await has_elevated_access(api_key, cnx):
        caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if user_id != caller_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to check mfa status for other users.")

    logging.info(f"Database Type: {database_type}, Connection: {cnx}, User ID: {user_id}")
    is_enabled = database_functions.functions.check_mfa_enabled(database_type, cnx, user_id)
    return {"mfa_enabled": is_enabled}
-
-
class VerifyMFABody(BaseModel):
    """Request body for checking a TOTP code at login time."""
    user_id: int  # user whose stored secret should be checked
    mfa_code: str  # current TOTP code supplied by the user
-
-
@app.post("/api/data/verify_mfa")
async def api_verify_mfa(body: VerifyMFABody, cnx=Depends(get_database_connection),
                         api_key: str = Depends(get_api_key_from_header)):
    """Check a TOTP code against the user's permanently stored MFA secret."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may verify on behalf of any user.
    if requester_id != body.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail="You can only verify your own login code!")

    secret = database_functions.functions.get_mfa_secret(database_type, cnx, body.user_id)
    if secret is None:
        # No stored secret means there is nothing to verify against.
        return {"verified": False}
    verification_result = TOTP(secret).verify(body.mfa_code)
    return {"verified": verification_result}
-
-
class UserIDBody(BaseModel):
    """Minimal request body carrying only a target user id."""
    user_id: int  # user the operation applies to
-
-
@app.delete("/api/data/delete_mfa")
async def api_delete_mfa(body: UserIDBody, cnx=Depends(get_database_connection),
                         api_key: str = Depends(get_api_key_from_header)):
    """Remove a user's MFA secret; admins may remove any, users only their own."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Non-admin callers may only delete their own secret.
    if not await has_elevated_access(api_key, cnx):
        caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if body.user_id != caller_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access these user details")

    result = database_functions.functions.delete_mfa_secret(database_type, cnx, body.user_id)
    if not result:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return {"deleted": result}
-
# Model for request data: a user's clock and date display preferences.
class TimeZoneInfo(BaseModel):
    """Request body for storing a user's time display preferences."""
    user_id: int  # user the preferences belong to
    timezone: str  # IANA-style timezone name, presumably — TODO confirm format
    hour_pref: int  # 12- vs 24-hour clock preference
    date_format: str  # preferred date display format
-
-
# FastAPI endpoint: persist a user's timezone / clock / date-format choices.
@app.post("/api/data/setup_time_info")
async def setup_timezone_info(data: TimeZoneInfo, cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Save timezone, hour preference, and date format for a user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Non-admin callers may only modify their own preferences.
    if not await has_elevated_access(api_key, cnx):
        caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if data.user_id != caller_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access these user details")

    success = database_functions.functions.setup_timezone_info(database_type, cnx, data.user_id, data.timezone,
                                                               data.hour_pref, data.date_format)
    if not success:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return {"success": success}
-
-
@app.get("/api/data/get_time_info")
async def get_time_info(user_id: int, cnx=Depends(get_database_connection),
                        api_key: str = Depends(get_api_key_from_header)):
    """Fetch a user's timezone, hour preference, and date format."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Non-admin callers may only read their own preferences.
    if not await has_elevated_access(api_key, cnx):
        caller_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
        if user_id != caller_id:
            raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
                                detail="You are not authorized to access these user details")

    timezone, hour_pref, date_format = database_functions.functions.get_time_info(database_type, cnx, user_id)
    if not timezone:
        # An empty timezone is treated as "user not found" by the original.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="User not found")
    return {"timezone": timezone, "hour_pref": hour_pref, "date_format": date_format}
-
-
@app.get("/api/data/first_login_done/{user_id}")
async def first_login_done(user_id: int, cnx=Depends(get_database_connection),
                           api_key: str = Depends(get_api_key_from_header)):
    """Report whether the user has completed first-login setup."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may query any user's status.
    if requester_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail="You can only run first login for yourself!")

    first_login_status = database_functions.functions.first_login_done(database_type, cnx, user_id)
    return {"FirstLogin": first_login_status}
-
class SelectedEpisodesDelete(BaseModel):
    """Request body for batch-deleting a user's selected episodes."""
    selected_episodes: List[int] = Field(..., title="List of Episode IDs")
    user_id: int = Field(..., title="User ID")
-
-
@app.post("/api/data/delete_selected_episodes")
async def delete_selected_episodes(data: SelectedEpisodesDelete, cnx=Depends(get_database_connection),
                                   api_key: str = Depends(get_api_key_from_header)):
    """Delete a batch of episodes belonging to the requesting user.

    Raises 403 for an invalid key or when the key does not own data.user_id
    (the web key may act for any user).
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    # Check if the provided API key is the web key
    is_web_key = api_key == base_webkey.web_key
    key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)

    # Allow the action if the API key belongs to the user or it's the web API key
    if key_id == data.user_id or is_web_key:
        # The original re-tested is_valid_key here; that branch was dead code
        # because an invalid key already raised above.
        delete_status = database_functions.functions.delete_selected_episodes(cnx, database_type, data.selected_episodes,
                                                                              data.user_id)
        return {"status": delete_status}
    else:
        raise HTTPException(status_code=403,
                            detail="You can only delete your own selected episodes!")
-
class SearchPodcastData(BaseModel):
    """Request body for searching a user's podcast data."""
    search_term: str  # free-text search term
    user_id: int  # user whose data is searched
-
-
@app.post("/api/data/search_data")
async def search_data(data: SearchPodcastData, cnx=Depends(get_database_connection),
                      api_key: str = Depends(get_api_key_from_header)):
    """Search the given user's podcast data for a term."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")
    # NOTE(review): unlike sibling endpoints, this does not verify that the
    # API key belongs to data.user_id — confirm whether that is intentional.
    result = database_functions.functions.search_data(database_type, cnx, data.search_term, data.user_id)
    return {"data": result}
-
-
class QueuePodData(BaseModel):
    """Request body for adding an item to the user's play queue."""
    episode_id: int  # episode (or YouTube video) to queue
    user_id: int  # queue owner; must match the API key owner
    is_youtube: bool = False  # treat episode_id as a YouTube video when True
-
@app.post("/api/data/queue_pod")
async def queue_pod(data: QueuePodData, cnx=Depends(get_database_connection),
                    api_key: str = Depends(get_api_key_from_header)):
    """Add an episode (or YouTube video) to the user's play queue."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail=f"You can only add {'videos' if data.is_youtube else 'episodes'} to your own queue!")

    already_queued = database_functions.functions.check_queued(
        database_type, cnx, data.episode_id, data.user_id, data.is_youtube
    )
    if already_queued:
        # Idempotent: a repeat queue request is reported, not duplicated.
        return {"data": f"{'Video' if data.is_youtube else 'Episode'} already in queue"}

    result = database_functions.functions.queue_pod(
        database_type, cnx, data.episode_id, data.user_id, data.is_youtube
    )
    return {"data": result}
-
class QueueRmData(BaseModel):
    """Request body for removing an item from the user's play queue."""
    episode_id: int  # episode (or YouTube video) to remove
    user_id: int  # queue owner; must match the API key owner
    is_youtube: bool = False  # treat episode_id as a YouTube video when True
-
@app.post("/api/data/remove_queued_pod")
async def remove_queued_pod(data: QueueRmData, cnx=Depends(get_database_connection),
                            api_key: str = Depends(get_api_key_from_header)):
    """Remove an episode (or YouTube video) from the user's play queue."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail=f"You can only remove {'videos' if data.is_youtube else 'episodes'} for your own queue!")

    # The database layer's result is returned unwrapped, as in the original.
    return database_functions.functions.remove_queued_pod(
        database_type, cnx, data.episode_id, data.user_id, data.is_youtube
    )
-# class QueuedEpisodesData(BaseModel):
-# user_id: int
-
-
@app.get("/api/data/get_queued_episodes")
async def get_queued_episodes(user_id: int = Query(...), cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Return the user's queued episodes."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may read any user's queue.
    if requester_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403,
                            detail="You can only get episodes from your own queue!")

    result = database_functions.functions.get_queued_episodes(database_type, cnx, user_id)
    return {"data": result}
-
class ReorderRequest(BaseModel):
    """Request body carrying the new queue ordering."""
    episode_ids: List[int]  # episode ids in the desired queue order
-
@app.post("/api/data/reorder_queue")
async def reorder_queue(request: ReorderRequest, user_id: int = Query(...), cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
    """Persist a new ordering for the user's queued episodes."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may reorder any user's queue.
    if requester_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You can only reorder your own queue!")

    if not database_functions.functions.reorder_queued_episodes(database_type, cnx, user_id, request.episode_ids):
        raise HTTPException(status_code=500, detail="Failed to reorder the queue")
    return {"message": "Queue reordered successfully"}
-
@app.get("/api/data/check_episode_in_db/{user_id}")
async def check_episode_in_db(user_id: int, episode_title: str = Query(...), episode_url: str = Query(...), cnx=Depends(get_database_connection),
                              api_key: str = Depends(get_api_key_from_header)):
    """Check whether an episode (matched by title and URL) exists for the user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    # NOTE(review): no web-key bypass here, unlike most sibling endpoints.
    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != user_id:
        raise HTTPException(status_code=403, detail="You can only check episodes for your own account")

    episode_exists = database_functions.functions.check_episode_exists(cnx, database_type, user_id, episode_title, episode_url)
    return {"episode_in_db": episode_exists}
-
@app.get("/api/data/get_pinepods_version")
async def get_pinepods_version(cnx=Depends(get_database_connection),
                               api_key: str = Depends(get_api_key_from_header)):
    """Return the running Pinepods version; any valid API key may ask.

    The original computed (and never used) the web-key flag and key owner id;
    those dead lookups were removed — version info is not user-scoped.
    """
    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403,
                            detail="Your API key is either invalid or does not have correct permission")

    result = database_functions.functions.get_pinepods_version()
    return {"data": result}
-
@app.post("/api/data/share_episode/{episode_id}")
async def share_episode(episode_id: int, cnx=Depends(get_database_connection),
                        api_key: str = Depends(get_api_key_from_header)):
    """Create a 60-day shareable URL key for an episode.

    Any valid API key may share; the original computed (and ignored) the key
    owner and web-key status, so those dead lookups were removed.
    """
    import uuid
    from datetime import datetime, timedelta

    is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
    if not is_valid_key:
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have the correct permission")

    url_key = str(uuid.uuid4())  # unique, unguessable share token
    # NOTE(review): utcnow() is naive and deprecated in Python 3.12+;
    # datetime.now(timezone.utc) would produce a tz-aware value — confirm the
    # DB column's expectations before migrating.
    expiration_date = datetime.utcnow() + timedelta(days=60)  # Expire in 60 days

    result = database_functions.functions.add_shared_episode(database_type, cnx, episode_id, url_key, expiration_date)
    if result:
        return {"url_key": url_key}
    else:
        raise HTTPException(status_code=500, detail="Failed to share episode")
-
-
@app.get("/api/data/cleanup_tasks")
async def api_cleanup_tasks(
    background_tasks: BackgroundTasks,
    is_admin: bool = Depends(check_if_admin)
) -> Dict[str, str]:
    """
    Endpoint to trigger cleanup of old PeopleEpisodes and expired SharedEpisodes.
    Admin-only (enforced by the check_if_admin dependency); the actual work
    runs after the response via FastAPI's BackgroundTasks.
    """
    background_tasks.add_task(cleanup_tasks)
    return {"detail": "Cleanup tasks initiated."}
-
def cleanup_tasks():
    """
    Background task to run database cleanup operations.

    Opens its own connection (background tasks run outside the request's
    dependency scope) and always closes it, even on failure.
    """
    cnx = create_database_connection()
    try:
        database_functions.functions.cleanup_old_episodes(cnx, database_type)
    except Exception as e:
        # Best-effort: report and continue; a failed cleanup is not fatal.
        print(f"Error during cleanup tasks: {str(e)}")
    finally:
        close_database_connection(cnx)
-
@app.get("/api/data/update_playlists")
async def api_update_playlists(
    background_tasks: BackgroundTasks,
    is_admin: bool = Depends(check_if_admin)
) -> Dict[str, str]:
    """
    Endpoint to trigger playlist updates for all playlists (system and user-defined).
    Admin-only; the refresh runs after the response via BackgroundTasks.
    """
    background_tasks.add_task(update_playlists_task)
    return {"detail": "Playlist update initiated."}
-
def update_playlists_task():
    """Background task to update all system and user-defined playlists.

    Errors are printed with tracebacks rather than raised so the background
    runner never dies on a single failed refresh.
    """
    import traceback  # hoisted: the original imported it twice, inside each handler

    print("Starting background playlist update task")
    try:
        cnx = create_database_connection()
        try:
            database_functions.functions.update_all_playlists(cnx, database_type)
            print("Background playlist update task completed successfully")
        except Exception as e:
            print(f"Error in update_all_playlists: {str(e)}")
            # hasattr(e, '__traceback__') is always true for exceptions; the
            # original's guard was removed as redundant.
            print(traceback.format_exc())
        finally:
            close_database_connection(cnx)
    except Exception as e:
        print(f"Critical error in update_playlists_task: {str(e)}")
        print(traceback.format_exc())
-
-
class PlaylistCreate(BaseModel):
    """Request body describing a smart playlist and its episode filters."""
    name: str  # display name of the playlist
    description: Optional[str]  # optional free-text description
    podcast_ids: Optional[List[int]]  # restrict to these podcasts; None = all
    include_unplayed: bool = True
    include_partially_played: bool = True
    include_played: bool = False
    play_progress_min: Optional[float] = None  # Made optional
    play_progress_max: Optional[float] = None  # Made optional
    time_filter_hours: Optional[int] = None  # Made optional
    min_duration: Optional[int]  # duration bound; units presumably seconds — TODO confirm
    max_duration: Optional[int]
    sort_order: str = "date_desc"
    group_by_podcast: bool = False
    max_episodes: Optional[int]  # cap on episode count; None = unlimited
    user_id: int  # owner of the playlist
    icon_name: str = "ph-playlist"  # icon identifier shown in the UI
-
class PlaylistDelete(BaseModel):
    """Request body for deleting one of the user's playlists."""
    user_id: int  # playlist owner; must match the API key owner
    playlist_id: int  # playlist to delete
-
class PlaylistsGet(BaseModel):
    """Request body for listing a user's playlists."""
    user_id: int  # user whose playlists are listed
-
@app.post("/api/data/create_playlist")
async def api_create_playlist(
    data: PlaylistCreate,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
) -> Dict[str, Any]:
    """Create a new custom playlist owned by the requesting user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(
            status_code=403,
            detail="You can only create playlists for yourself!"
        )

    try:
        playlist_id = database_functions.functions.create_playlist(cnx, database_type, data)
    except Exception as e:
        # Surface creation failures from the database layer as 400s.
        raise HTTPException(status_code=400, detail=str(e))
    return {"detail": "Playlist created successfully", "playlist_id": playlist_id}
-
@app.delete("/api/data/delete_playlist")
async def api_delete_playlist(
    data: PlaylistDelete,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
) -> Dict[str, str]:
    """Delete one of the requesting user's playlists."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(
            status_code=403,
            detail="You can only delete your own playlists!"
        )

    try:
        database_functions.functions.delete_playlist(cnx, database_type, data.user_id, data.playlist_id)
    except Exception as e:
        # Surface deletion failures from the database layer as 400s.
        raise HTTPException(status_code=400, detail=str(e))
    return {"detail": "Playlist deleted successfully"}
-
@app.get("/api/data/get_playlists")
async def api_get_playlists(
    user_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
) -> Dict[str, List[Dict[str, Any]]]:
    """List all playlists accessible to the user."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    if requester_id != user_id and api_key != base_webkey.web_key:
        raise HTTPException(
            status_code=403,
            detail="You can only view your own playlists!"
        )

    try:
        playlists = database_functions.functions.get_playlists(cnx, database_type, user_id)
    except Exception as e:
        # Surface lookup failures from the database layer as 400s.
        raise HTTPException(status_code=400, detail=str(e))
    return {"playlists": playlists}
-
-
@app.get("/api/data/get_playlist_episodes")
async def api_get_playlist_episodes(
    user_id: int,
    playlist_id: int,
    cnx=Depends(get_database_connection),
    api_key: str = Depends(get_api_key_from_header)
) -> Dict[str, Any]:
    """Return every episode in a playlist."""
    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(
            status_code=403,
            detail="Your API key is either invalid or does not have correct permission"
        )

    # NOTE(review): unlike sibling playlist endpoints, there is no key-owner
    # check here — confirm get_playlist_episodes enforces ownership itself.
    try:
        return database_functions.functions.get_playlist_episodes(cnx, database_type, user_id, playlist_id)
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))
-
-
@app.get("/api/data/episode_by_url/{url_key}")
async def get_episode_by_url_key(url_key: str, cnx=Depends(get_database_connection)):
    """Resolve a shared URL key to episode metadata.

    No API-key auth: shared links are deliberately public until they expire.
    Leftover debug print statements from the original were removed.
    """
    episode_id = database_functions.functions.get_episode_id_by_url_key(database_type, cnx, url_key)
    if episode_id is None:
        raise HTTPException(status_code=404, detail="Invalid or expired URL key")

    try:
        # UserID is bypassed because shared links carry no user context.
        episode_data = database_functions.functions.get_episode_metadata_id(database_type, cnx, episode_id)
        return {"episode": episode_data}
    except ValueError as e:
        raise HTTPException(status_code=404, detail=str(e))
-
-
class LoginInitiateData(BaseModel):
    """Request body for starting a Nextcloud Login Flow v2 handshake."""
    user_id: int  # user initiating the login; must match the API key owner
    nextcloud_url: str  # base URL of the target Nextcloud instance
-
@app.post("/api/data/initiate_nextcloud_login")
async def initiate_nextcloud_login(data: LoginInitiateData, cnx=Depends(get_database_connection), api_key: str = Depends(get_api_key_from_header)):
    """Start a Nextcloud Login Flow v2 handshake on behalf of the user."""
    import requests

    if not database_functions.functions.verify_api_key(cnx, database_type, api_key):
        raise HTTPException(status_code=403, detail="Your API key is either invalid or does not have correct permission")

    requester_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
    # The shared web key may initiate the flow for any user.
    if requester_id != data.user_id and api_key != base_webkey.web_key:
        raise HTTPException(status_code=403, detail="You are not authorized to initiate this action.")

    login_url = f"{data.nextcloud_url}/index.php/login/v2"
    try:
        response = requests.post(login_url)
        response.raise_for_status()
        return response.json()
    except requests.HTTPError:
        # response is always bound here: raise_for_status only runs after
        # post() has returned, so referencing it is safe.
        detail = f"Nextcloud login failed with status code {response.status_code}: {response.text}"
        raise HTTPException(status_code=response.status_code, detail=detail)
    except requests.RequestException as req_err:
        # Network-level failures (DNS, refused connection, timeout, ...).
        raise HTTPException(status_code=500, detail=f"Failed to reach Nextcloud server: {str(req_err)}")
-
-class GpodderAuthRequest(BaseModel):
- gpodder_url: str
- gpodder_username: str
- gpodder_password: str
-
-@app.post("/api/data/verify_gpodder_auth")
-async def verify_gpodder_auth(request: GpodderAuthRequest):
- from requests.auth import HTTPBasicAuth
- auth = HTTPBasicAuth(request.gpodder_username, request.gpodder_password)
- async with httpx.AsyncClient() as client:
- try:
- response = await client.post(f"{request.gpodder_url}/api/2/auth/{request.gpodder_username}/login.json", auth=auth)
- response.raise_for_status() # Will raise an httpx.HTTPStatusError for 4XX/5XX responses
- if response.status_code == 200:
- return {"status": "success", "message": "Logged in!"}
- else:
- raise HTTPException(status_code=response.status_code, detail="Authentication failed")
- except httpx.HTTPStatusError as e:
- raise HTTPException(status_code=e.response.status_code, detail="Authentication failed")
- except Exception as e:
- raise HTTPException(status_code=500, detail="Internal Server Error")
-
-class GpodderSettings(BaseModel):
- user_id: int
- gpodder_url: str
- gpodder_token: str
-
-@app.post("/api/data/add_gpodder_settings")
-async def add_gpodder_settings(data: GpodderSettings, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- result = database_functions.functions.add_gpodder_settings(database_type, cnx, data.user_id, data.gpodder_url, data.gpodder_token)
- return {"data": result}
- else:
- raise HTTPException(status_code=403,
- detail="You can only add your own gpodder data!")
-
-class GpodderSettings(BaseModel):
- user_id: int
- gpodder_url: str
- gpodder_username: str
- gpodder_password: str
-
-
-@app.post("/api/data/add_gpodder_server")
-async def add_gpodder_server(
- data: GpodderSettings,
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == data.user_id or is_web_key:
- # First add the gpodder server
- result = database_functions.functions.add_gpodder_server(
- database_type,
- cnx,
- data.user_id,
- data.gpodder_url,
- data.gpodder_username,
- data.gpodder_password
- )
-
- # Get the user's gpodder settings - similar to what refresh_nextcloud_subscription does
- if database_type == "postgresql":
- cursor = cnx.cursor()
- cursor.execute('''
- SELECT "userid", "gpodderurl", "gpoddertoken", "gpodderloginname"
- FROM "Users"
- WHERE "userid" = %s AND "gpodderurl" IS NOT NULL
- ''', (data.user_id,))
- user = cursor.fetchone()
- else:
- cursor = cnx.cursor()
- cursor.execute('''
- SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName
- FROM Users
- WHERE UserID = %s AND GpodderUrl IS NOT NULL
- ''', (data.user_id,))
- user = cursor.fetchone()
-
- if user:
- if isinstance(user, dict):
- if database_type == "postgresql":
- gpodder_url = user["gpodderurl"]
- gpodder_token = user["gpoddertoken"]
- gpodder_login = user["gpodderloginname"]
- else:
- gpodder_url = user["GpodderUrl"]
- gpodder_token = user["GpodderToken"]
- gpodder_login = user["GpodderLoginName"]
- else:
- _, gpodder_url, gpodder_token, gpodder_login = user
-
- # Add the refresh task for just this user
- background_tasks.add_task(
- refresh_nextcloud_subscription_for_user,
- database_type,
- data.user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login
- )
-
- return {"data": result}
- else:
- raise HTTPException(status_code=403,
- detail="You can only add your own gpodder data!")
-
-
-class RemoveGpodderSettings(BaseModel):
- user_id: int
-
-@app.post("/api/data/remove_gpodder_settings")
-async def remove_gpodder_settings(data: RemoveGpodderSettings, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- result = database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id)
- return {"data": result}
- else:
- raise HTTPException(status_code=403,
- detail="You can only remove your own gpodder data!")
-
-@app.get("/api/data/check_gpodder_settings/{user_id}")
-async def check_gpodder_settings(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- result = database_functions.functions.check_gpodder_settings(database_type, cnx, user_id)
- return {"data": result}
- else:
- raise HTTPException(status_code=403,
- detail="You can only remove your own gpodder data!")
-
-@app.get("/api/data/get_gpodder_settings/{user_id}")
-async def get_gpodder_settings(user_id: int, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == user_id or is_web_key:
- result = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id)
- return {"data": result}
- else:
- raise HTTPException(status_code=403,
- detail="You can only remove your own gpodder data!")
-
-
-class NextcloudAuthRequest(BaseModel):
- user_id: int
- token: str
- poll_endpoint: HttpUrl
- nextcloud_url: HttpUrl
-
-@app.post("/api/data/add_nextcloud_server")
-async def add_nextcloud_server(background_tasks: BackgroundTasks, data: NextcloudAuthRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
-
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- elevated_access = await has_elevated_access(api_key, cnx)
-
- if not elevated_access:
- # Get user ID from API key
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if data.user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these user details")
-
- # Reset gPodder settings to default
- database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id)
-
- # Add the polling task to the background tasks
- background_tasks.add_task(poll_for_auth_completion_background, data, database_type)
-
- # Return 200 status code before starting to poll
- return {"status": "polling"}
-
-async def poll_for_auth_completion_background(data: NextcloudAuthRequest, database_type):
- # Create a new database connection
- cnx = create_database_connection()
-
- try:
- credentials = await poll_for_auth_completion(data.poll_endpoint, data.token)
- if credentials:
- logging.info(f"Nextcloud authentication successful: {credentials}")
- logging.info(f"Adding Nextcloud settings for user {data.user_id}")
- logging.info(f"Database Type: {database_type}, Connection: {cnx}, User ID: {data.user_id}")
- logging.info(f"Nextcloud URL: {data.nextcloud_url}, Token: {data.token}")
- result = database_functions.functions.add_gpodder_settings(database_type, cnx, data.user_id, str(data.nextcloud_url), credentials["appPassword"], credentials["loginName"], "nextcloud")
- if not result:
- logging.error("User not found")
- else:
- logging.error("Nextcloud authentication failed.")
- finally:
- # Close the database connection
- cnx.close()
-
-# Adjusted to use httpx for async HTTP requests
-async def poll_for_auth_completion(endpoint: HttpUrl, token: str):
- payload = {"token": token}
- timeout = 20 * 60 # 20 minutes timeout for polling
- async with httpx.AsyncClient() as client:
- start_time = asyncio.get_event_loop().time()
- while asyncio.get_event_loop().time() - start_time < timeout:
- try:
- response = await client.post(str(endpoint), json=payload, headers={"Content-Type": "application/json"})
- except httpx.ConnectTimeout:
- logging.info("Connection timed out, retrying...")
- logging.info(f"endpoint: {endpoint}, token: {token}")
- continue
- if response.status_code == 200:
- credentials = response.json()
- logging.info(f"Authentication successful: {credentials}")
- return credentials
- elif response.status_code == 404:
- await asyncio.sleep(5) # Non-blocking sleep
- else:
- logging.info(f"Polling failed with status code {response.status_code}")
- raise HTTPException(status_code=500, detail="Polling for Nextcloud authentication failed.")
- raise HTTPException(status_code=408, detail="Nextcloud authentication request timed out.")
-
-@app.get("/api/data/refresh_nextcloud_subscriptions")
-async def refresh_nextcloud_subscription(background_tasks: BackgroundTasks, is_admin: bool = Depends(check_if_admin), api_key: str = Depends(get_api_key_from_header)):
- cnx = create_database_connection()
- try:
- users = database_functions.functions.get_nextcloud_users(database_type, cnx)
- finally:
- close_database_connection(cnx)
- for user in users:
- # Handle both dictionary and tuple cases
- if isinstance(user, dict):
- if database_type == "postgresql":
- user_id = user["userid"]
- gpodder_url = user["gpodderurl"]
- gpodder_token = user["gpoddertoken"]
- gpodder_login = user["gpodderloginname"]
- sync_type = user.get("pod_sync_type", "None")
- else:
- user_id = user["UserID"]
- gpodder_url = user["GpodderUrl"]
- gpodder_token = user["GpodderToken"]
- gpodder_login = user["GpodderLoginName"]
- sync_type = user.get("Pod_Sync_Type", "None")
- else: # assuming tuple
- # Now handle 5 values instead of 4
- if len(user) >= 5:
- user_id, gpodder_url, gpodder_token, gpodder_login, sync_type = user
- else:
- user_id, gpodder_url, gpodder_token, gpodder_login = user
- sync_type = "None"
-
- # Pass the sync_type to the refresh function
- background_tasks.add_task(
- refresh_nextcloud_subscription_for_user,
- database_type,
- user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login,
- sync_type # Add this parameter
- )
- return {"status": "success", "message": "Nextcloud subscriptions refresh initiated."}
-
-def refresh_nextcloud_subscription_for_user(database_type, user_id, gpodder_url, gpodder_token, gpodder_login, sync_type=None):
- cnx = create_database_connection()
- try:
- # If sync_type wasn't passed, try to get it from the database
- if not sync_type:
- sync_type = database_functions.functions.get_gpodder_type(cnx, database_type, user_id)
-
- # Determine if this is internal based on URL
- is_internal = gpodder_url == "http://localhost:8042"
- print(f"Using {'internal' if is_internal else 'external'} gpodder API for user {user_id}")
-
- # Special handling for nextcloud sync
- if sync_type == "nextcloud":
- print(f"Using nextcloud gpodder API for user {user_id}")
- success = database_functions.functions.refresh_nextcloud_subscription(
- database_type,
- cnx,
- user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login,
- sync_type
- )
- return success
- # For all other GPodder sync types, use the standard refresh function
- elif sync_type in ["gpodder", "both", "external"]:
- # Get default device ID
- device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id)
-
- # Get device name if we have a device ID
- device_name = None
- if device_id:
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s'
- else:
- query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s"
-
- cursor.execute(query, (device_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if result:
- device_name = result[0] if isinstance(result, tuple) else result["devicename"]
-
- # Determine if this is a remote sync
- is_remote = not is_internal and sync_type in ["external", "both"]
-
- success = database_functions.functions.refresh_gpodder_subscription(
- database_type,
- cnx,
- user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login,
- sync_type,
- device_id,
- device_name,
- is_remote
- )
- return success
- else:
- print(f"GPodder sync not enabled for user {user_id} (sync_type: {sync_type})")
- return False
- finally:
- close_database_connection(cnx)
-
-class RemoveSyncRequest(BaseModel):
- user_id: int
-
-@app.delete("/api/data/remove_podcast_sync")
-async def remove_podcast_sync(data: RemoveSyncRequest, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the user has permission to modify this user's data
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if data.user_id != user_id_from_api_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to modify these user settings")
-
- # Remove the sync settings
- database_functions.functions.remove_gpodder_settings(database_type, cnx, data.user_id)
-
- return {"success": True, "message": "Podcast sync settings removed successfully"}
-
-def check_valid_feed(feed_url: str, username: Optional[str] = None, password: Optional[str] = None):
- """
- Check if the provided URL points to a valid podcast feed.
- Uses both direct content-type checking and feedparser validation.
-
- Args:
- feed_url: URL of the podcast feed
- username: Optional username for authenticated feeds
- password: Optional password for authenticated feeds
-
- Returns:
- feedparser.FeedParserDict: The parsed feed if valid
-
- Raises:
- ValueError: If the feed is invalid or inaccessible
- """
- import feedparser
- import requests
- from requests.auth import HTTPBasicAuth
- from typing import Optional
-
- # Common podcast feed content types
- VALID_CONTENT_TYPES = [
- 'application/xml',
- 'text/xml',
- 'application/rss+xml',
- 'application/atom+xml',
- 'application/rdf+xml',
- ]
-
- def is_valid_content_type(content_type: str) -> bool:
- """Check if the content type indicates XML content."""
- content_type = content_type.lower().split(';')[0].strip()
- return any(valid_type in content_type for valid_type in VALID_CONTENT_TYPES) or 'xml' in content_type
-
- # Use requests to fetch the feed content
- try:
- # Set multiple user agents and accept headers to improve compatibility
- headers = {
- 'User-Agent': 'Mozilla/5.0 (compatible; PodcastApp/1.0; +https://example.com)',
- 'Accept': 'application/rss+xml, application/atom+xml, application/xml, text/xml, */*'
- }
-
- # Handle authentication if provided
- auth = HTTPBasicAuth(username, password) if username and password else None
-
- # Make the request with a timeout
- response = requests.get(
- feed_url,
- headers=headers,
- auth=auth,
- timeout=10,
- allow_redirects=True
- )
- response.raise_for_status()
-
- # Get content type, handling cases where it might not be present
- content_type = response.headers.get('Content-Type', '').lower()
-
- # Special handling for feeds that don't properly set content type
- if not is_valid_content_type(content_type):
- # Try to parse it anyway - some feeds might be valid despite wrong content type
- feed_content = response.content
- parsed_feed = feedparser.parse(feed_content)
-
- # If we can parse it and it has required elements, accept it despite content type
- if (parsed_feed.get('version') and
- 'title' in parsed_feed.feed and
- 'link' in parsed_feed.feed):
- return parsed_feed
-
- # If we can't parse it, then it's probably actually invalid
- raise ValueError(
- f"Unexpected Content-Type: {content_type}. "
- "The feed URL must point to an XML feed file."
- )
-
- feed_content = response.content
-
- except requests.RequestException as e:
- raise ValueError(f"Error fetching the feed: {str(e)}")
-
- # Parse the feed content using feedparser
- parsed_feed = feedparser.parse(feed_content)
-
- # Check for feedparser errors
- if parsed_feed.get('bozo') == 1:
- exception = parsed_feed.get('bozo_exception')
- if exception:
- raise ValueError(f"Feed parsing error: {str(exception)}")
-
- # Validate the parsed feed has required elements
- if not parsed_feed.get('version'):
- raise ValueError("Invalid podcast feed URL or content: Could not determine feed version.")
-
- required_attributes = ['title', 'link']
- missing_attributes = [attr for attr in required_attributes if attr not in parsed_feed.feed]
-
- if missing_attributes:
- raise ValueError(
- f"Feed missing required attributes: {', '.join(missing_attributes)}. "
- "The URL must point to a valid podcast feed."
- )
-
- # Check for podcast-specific elements
- has_items = len(parsed_feed.entries) > 0
- if not has_items:
- raise ValueError("Feed contains no episodes.")
-
- return parsed_feed
-
-
-
-class CustomPodcast(BaseModel):
- feed_url: str
- user_id: int
- username: Optional[str] = None
- password: Optional[str] = None
-
-@app.post("/api/data/add_custom_podcast")
-async def add_custom_pod(data: CustomPodcast, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- try:
- parsed_feed = check_valid_feed(data.feed_url, data.username, data.password)
- except ValueError as e:
- logger.error(f"Failed to parse: {str(e)}")
- raise HTTPException(status_code=400, detail=str(e))
-
- # Assuming the rest of the code processes the podcast correctly
- try:
- podcast_id = database_functions.functions.add_custom_podcast(database_type, cnx, data.feed_url, data.user_id, data.username, data.password)
- print('custom done')
- podcast_details = database_functions.functions.get_podcast_details(database_type, cnx, data.user_id, podcast_id)
- return {"data": podcast_details}
- except Exception as e:
- logger.error(f"Failed to process the podcast: {str(e)}")
- raise HTTPException(status_code=500, detail=f"Failed to process the podcast: {str(e)}")
- else:
- raise HTTPException(status_code=403,
- detail="You can only add podcasts for yourself!")
-
-class PersonEpisodesRequest(BaseModel):
- user_id: int
- person_id: int
-
-@app.get("/api/data/person/episodes/{user_id}/{person_id}")
-async def api_return_person_episodes(
- user_id: int,
- person_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- episodes = database_functions.functions.return_person_episodes(database_type, cnx, user_id, person_id)
- if episodes is None:
- episodes = []
- return {"episodes": episodes}
- else:
- raise HTTPException(
- status_code=403,
- detail="You can only view episodes for your own subscriptions!"
- )
-
-@app.get("/api/data/refresh_hosts")
-async def refresh_all_hosts(
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection), is_admin: bool = Depends(check_if_admin),
- api_key: str = Depends(get_api_key_from_header),
-):
- """Refresh episodes for all subscribed hosts"""
- # Verify it's the system/web API key
- if api_key != base_webkey.web_key:
- raise HTTPException(status_code=403, detail="This endpoint requires system API key")
- try:
- cursor = cnx.cursor()
- # Get all unique people that users are subscribed to
- if database_type == "postgresql":
- cursor.execute("""
- SELECT DISTINCT p.PersonID, p.Name, p.UserID
- FROM "People" p
- """)
- else: # MySQL
- cursor.execute("""
- SELECT DISTINCT p.PersonID, p.Name, p.UserID
- FROM People p
- """)
-
- subscribed_hosts = cursor.fetchall()
- if not subscribed_hosts:
- return {"message": "No subscribed hosts found"}
-
- # Process each host in the background
- hosts_to_process = []
- for host in subscribed_hosts:
- # Handle both tuple and dict result formats
- if isinstance(host, dict):
- person_id = host.get('PersonID', host.get('personid'))
- person_name = host.get('Name', host.get('name'))
- user_id = host.get('UserID', host.get('userid'))
- else: # tuple
- person_id, person_name, user_id = host
-
- hosts_to_process.append(person_name)
- background_tasks.add_task(
- process_person_subscription_task,
- user_id,
- person_id,
- person_name
- )
-
- return {
- "message": f"Refresh initiated for {len(subscribed_hosts)} hosts",
- "hosts": hosts_to_process
- }
- except Exception as e:
- logging.error(f"Error refreshing hosts: {str(e)}")
- raise HTTPException(status_code=500, detail=str(e))
-
-class PersonSubscribeRequest(BaseModel):
- person_name: str
- person_img: str
- podcast_id: int
-
-@app.post("/api/data/person/subscribe/{user_id}/{person_id}")
-async def api_subscribe_to_person(
- user_id: int,
- person_id: int,
- request: PersonSubscribeRequest,
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- success, db_person_id = database_functions.functions.subscribe_to_person(
- cnx,
- database_type,
- user_id,
- person_id,
- request.person_name,
- request.person_img,
- request.podcast_id
- )
-
- if success:
- # Add background task to process the subscription using the actual PersonID
- background_tasks.add_task(
- process_person_subscription_task,
- user_id,
- db_person_id, # Use the actual PersonID from the database
- request.person_name
- )
- return {
- "message": "Successfully subscribed to person",
- "person_id": db_person_id # Return the actual person ID
- }
- else:
- raise HTTPException(status_code=400, detail="Failed to subscribe to person")
- else:
- raise HTTPException(status_code=403, detail="You can only subscribe for yourself!")
-
-class UniqueShow(TypedDict):
- title: str
- feed_url: str
- feed_id: int
-
-def process_person_subscription_task(
- user_id: int,
- person_id: int,
- person_name: str
-) -> None:
- """Regular synchronous task for processing person subscription"""
- cnx = create_database_connection()
- try:
- # Run the async function in a new event loop
- loop = asyncio.new_event_loop()
- asyncio.set_event_loop(loop)
- loop.run_until_complete(
- process_person_subscription(user_id, person_id, person_name, cnx)
- )
- loop.close()
- # After successful person subscription processing, trigger a server refresh
- print("Person subscription processed, initiating server refresh...")
- try:
- refresh_pods_task()
- print("Server refresh completed successfully")
- except Exception as refresh_error:
- print(f"Error during server refresh: {refresh_error}")
- # Don't raise the error here - we don't want to fail the whole operation
- # if just the refresh fails
- pass
- except Exception as e:
- print(f"Error in process_person_subscription_task: {e}")
- raise
- finally:
- close_database_connection(cnx)
-
-async def process_person_subscription(
- user_id: int,
- person_id: int,
- person_name: str,
- cnx
-) -> None:
- """Async function to process person subscription and gather their shows"""
- print(f"Starting refresh for host: {person_name} (ID: {person_id})")
- try:
- # Set of unique shows (title, feed_url, feed_id)
- processed_shows: Set[Tuple[str, str, int]] = set()
-
- # 1. Get podcasts from podpeople
- async with httpx.AsyncClient(timeout=30.0) as client:
- try:
- podpeople_response = await client.get(
- f"{people_url}/api/hostsearch",
- params={"name": person_name}
- )
- podpeople_response.raise_for_status()
- podpeople_data = podpeople_response.json()
-
- # Check if we got valid data
- if podpeople_data and podpeople_data.get("success"):
- for podcast in podpeople_data.get("podcasts", []):
- processed_shows.add((
- podcast['title'],
- podcast['feed_url'],
- podcast['id']
- ))
- except Exception as e:
- print(f"Error getting data from podpeople: {str(e)}")
- # Continue execution even if podpeople lookup fails
- pass
-
- # 2. Get podcasts from podcast index
- print(f"API URL configured as: {api_url}")
- async with httpx.AsyncClient(timeout=30.0) as client:
- try:
- index_response = await client.get(
- f"{api_url}",
- params={
- "query": person_name,
- "index": "person",
- "search_type": "person"
- }
- )
- index_response.raise_for_status()
- index_data = index_response.json()
-
- if index_data and "items" in index_data:
- for episode in index_data["items"]:
- if all(field is not None for field in [episode.get("feedTitle"), episode.get("feedUrl"), episode.get("feedId")]):
- processed_shows.add((
- episode["feedTitle"],
- episode["feedUrl"],
- episode["feedId"]
- ))
- except Exception as e:
- print(f"Error getting data from podcast index: {str(e)}")
- # Continue execution even if podcast index lookup fails
- pass
-
- # Only continue if we found any shows
- if not processed_shows:
- print(f"No shows found for person: {person_name}")
- return
-
- # 3. Process each unique show
- for title, feed_url, feed_id in processed_shows:
- try:
- # First check if podcast exists for user
- user_podcast_id = database_functions.functions.get_podcast_id(
- database_type,
- cnx,
- user_id,
- feed_url,
- title
- )
-
- # Get podcast details and add as system podcast
- podcast_values = database_functions.app_functions.get_podcast_values(
- feed_url,
- 1, # System UserID
- None,
- None,
- False
- )
-
- if not user_podcast_id:
- # Check if system podcast exists (UserID = 0)
- system_podcast_id = database_functions.functions.get_podcast_id(
- database_type,
- cnx,
- 1, # System UserID
- feed_url,
- title
- )
-
- if system_podcast_id is None:
- # If not found for system, add as a new system podcast
- podcast_values = database_functions.app_functions.get_podcast_values(
- feed_url,
- 1, # System UserID
- None,
- None,
- False
- )
- success = database_functions.functions.add_person_podcast(
- cnx,
- database_type,
- podcast_values,
- 1 # System UserID
- )
- if success:
- # Get the newly created podcast ID
- system_podcast_id = database_functions.functions.get_podcast_id(
- database_type,
- cnx,
- 1, # System UserID
- feed_url,
- title
- )
- podcast_id = system_podcast_id
- else:
- podcast_id = user_podcast_id
-
- print(f"Using podcast: ID={podcast_id}, Title={title}")
- # 4. Add episodes to PeopleEpisodes
- database_functions.functions.add_people_episodes(
- cnx,
- database_type,
- person_id=person_id,
- podcast_id=podcast_id,
- feed_url=feed_url,
- )
-
- except Exception as e:
- logging.error(f"Error processing show {title}: {str(e)}")
- continue
-
- except Exception as e:
- logging.error(f"Error processing person subscription: {str(e)}")
- raise
-
-class UnsubscribeRequest(BaseModel):
- person_name: str
-
-@app.delete("/api/data/person/unsubscribe/{user_id}/{person_id}")
-async def api_unsubscribe_from_person(
- user_id: int,
- person_id: int,
- request: UnsubscribeRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if key_id == user_id or is_web_key:
- success = database_functions.functions.unsubscribe_from_person(cnx, database_type, user_id, person_id, request.person_name)
- if success:
- return {"message": "Successfully unsubscribed from person"}
- else:
- raise HTTPException(status_code=400, detail="Failed to unsubscribe from person")
- else:
- raise HTTPException(status_code=403, detail="You can only unsubscribe for yourself!")
-
-@app.get("/api/data/person/subscriptions/{user_id}")
-async def api_get_person_subscriptions(
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403, detail="Invalid or unauthorized API key")
-
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- if key_id == user_id or is_web_key:
- subscriptions = database_functions.functions.get_person_subscriptions(cnx, database_type, user_id)
- return {"subscriptions": subscriptions}
- else:
- raise HTTPException(status_code=403, detail="You can only view your own subscriptions!")
-
-
-@app.get("/api/data/stream/{episode_id}")
-async def stream_episode(
- episode_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Query(..., alias='api_key'),
- user_id: int = Query(..., alias='user_id'),
- source_type: str = Query(None, alias='type')
-):
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not key_id and not is_web_key:
- rss_key = database_functions.functions.get_rss_key_if_valid(cnx, database_type, api_key)
- if not rss_key:
- raise HTTPException(status_code=403, detail="Invalid API key")
- key_id = rss_key.get('user_id')
- universal_key = (not rss_key.get('podcast_ids') or len(rss_key.get('podcast_ids')) == 0 or -1 in rss_key.get('podcast_ids'))
- if not universal_key and not database_functions.functions.validate_episode_access(cnx, database_type, episode_id, rss_key.get('podcast_ids')):
- raise HTTPException(status_code=403, detail="You do not have permission to access this episode")
-
- if key_id == user_id or is_web_key:
- # Choose which lookup to use based on source_type
- if source_type == "youtube":
- file_path = database_functions.functions.get_youtube_video_location(cnx, database_type, episode_id, user_id)
- print(f'file path in if source youtube {file_path}')
- else:
- file_path = database_functions.functions.get_download_location(cnx, database_type, episode_id, user_id)
- print(f'file path in if source else {file_path}')
-
- if file_path:
- # Don't set filename to allow streaming instead of forced download
- return FileResponse(path=file_path, media_type='audio/mpeg')
- else:
- raise HTTPException(status_code=404, detail="Episode not found or not downloaded")
- else:
- raise HTTPException(status_code=403, detail="You do not have permission to access this episode")
-
-class UpdateGpodderSyncRequest(BaseModel):
- enabled: bool
-
-@app.post("/api/data/gpodder/toggle")
-async def toggle_gpodder_sync(
- request: UpdateGpodderSyncRequest,
- background_tasks: BackgroundTasks,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Enable or disable gpodder sync for the current user"""
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
- # Get the user ID from the API key
- user_id_result = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- print(f"User ID result: {user_id_result}")
- if isinstance(user_id_result, dict):
- user_id = user_id_result.get('userid')
- else:
- user_id = user_id_result[0] if isinstance(user_id_result, tuple) else user_id_result
- if not user_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
- try:
- print(f"Request to toggle gpodder sync: {request.enabled}")
- user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
- if not user_data:
- raise HTTPException(status_code=404, detail="User not found")
- # Get initial state
- current_sync_type = user_data["sync_type"]
- print(f"Current sync type: {current_sync_type}")
- device_info = None
- if request.enabled:
- # Enable gpodder sync
- result = database_functions.functions.set_gpodder_internal_sync(cnx, database_type, user_id)
- if not result:
- raise HTTPException(status_code=500, detail="Failed to enable gpodder sync")
- device_info = result
-
- # Get required parameters for refresh_gpodder_subscription
- gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id)
- gpodder_token = gpodder_settings.get("gpoddertoken", "")
- gpodder_login = gpodder_settings.get("gpodderloginname", "")
- print(gpodder_settings)
-
- # Get the updated sync type after enabling
- updated_user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
- updated_sync_type = updated_user_data["sync_type"]
- # gpodder_login = gpodder_settings.get("gpodderloginname", "")
- # gpodder_token = gpodder_settings.get("gpoddertoken", "")
- device_id = device_info.get("device_id") if device_info else None
- device_name = device_info.get("device_name") if device_info else None
-
- background_tasks.add_task(
- refresh_gpodder_subscription_for_background, # Use the wrapper function
- database_type,
- user_id,
- 'http://localhost:8042',
- gpodder_token,
- gpodder_login,
- updated_sync_type,
- device_id,
- device_name,
- False # is_remote
- )
- print(f"Added background task to sync gpodder for user: {user_id}")
- else:
- # Disable gpodder sync
- success = database_functions.functions.disable_gpodder_internal_sync(cnx, database_type, user_id)
- if not success:
- raise HTTPException(status_code=500, detail="Failed to disable gpodder sync")
- # Get updated state after changes
- updated_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
- new_sync_type = updated_data["sync_type"]
- print(f"Updated sync type: {new_sync_type}")
- response = {
- "sync_type": new_sync_type,
- "gpodder_enabled": new_sync_type in ["gpodder", "both"],
- "external_enabled": new_sync_type in ["external", "both"],
- "external_url": updated_data.get("gpodder_url") if new_sync_type in ["external", "both"] else None,
- "api_url": "http://localhost:8042" if new_sync_type in ["gpodder", "both"] else None
- }
- # Add device information if available
- if device_info and request.enabled:
- response["device_name"] = device_info["device_name"]
- response["device_id"] = device_info["device_id"]
- print(f"Returning response: {response}")
- return response
- except Exception as e:
- print(f"Error in toggle_gpodder_sync: {e}")
- raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
-
-
-def refresh_gpodder_subscription_for_background(database_type, user_id, gpodder_url, gpodder_token,
- gpodder_login, sync_type, device_id=None, device_name=None, is_remote=False):
- """Wrapper function for background tasks to ensure proper database connection handling"""
- from database_functions.db_client import create_database_connection, close_database_connection
- import logging
-
- logger = logging.getLogger(__name__)
-
- # Create a new connection explicitly for this background task
- cnx = create_database_connection()
-
- try:
- print(f"Starting background refresh for user {user_id} with sync_type {sync_type}")
- # Call the original function with our managed connection
- success = database_functions.functions.refresh_gpodder_subscription(
- database_type,
- cnx,
- user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login,
- sync_type,
- device_id,
- device_name,
- is_remote
- )
- return success
- except Exception as e:
- logger.error(f"Error in background gpodder refresh: {str(e)}")
- return False
- finally:
- # Always close the connection we created
- close_database_connection(cnx)
- print(f"Closed database connection for background task for user {user_id}")
-# Helper function to generate a token for internal gpodder API
-def generate_gpodder_token(user_id):
- import secrets
- token = secrets.token_hex(16)
- return f"internal_gpodder_{user_id}_{token}"
-
-@app.get("/api/data/gpodder/status")
-async def get_gpodder_status(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Get the current gpodder sync status for the user"""
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Get the user ID from the API key
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not user_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- try:
- user_data = database_functions.functions.get_user_gpodder_status(cnx, database_type, user_id)
-
- if not user_data:
- raise HTTPException(status_code=404, detail="User not found")
-
- sync_type = user_data["sync_type"]
-
- return {
- "sync_type": sync_type,
- "gpodder_enabled": sync_type in ["gpodder", "both"],
- "external_enabled": sync_type in ["external", "both"],
- "external_url": user_data["gpodder_url"],
- "api_url": "http://localhost:8042" # Replace with actual API URL if needed
- }
- except Exception as e:
- print(f"Error in get_gpodder_status: {e}")
- raise HTTPException(status_code=500, detail=f"Internal server error: {str(e)}")
-
-class BackupUser(BaseModel):
- user_id: int
-
-
-@app.post("/api/data/backup_user", response_class=PlainTextResponse)
-async def backup_user(data: BackupUser, cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)):
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(status_code=403,
- detail="Your API key is either invalid or does not have correct permission")
-
- # Check if the provided API key is the web key
- is_web_key = api_key == base_webkey.web_key
-
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Allow the action if the API key belongs to the user or it's the web API key
- if key_id == data.user_id or is_web_key:
- try:
- opml_data = database_functions.functions.backup_user(database_type, cnx, data.user_id)
- except Exception as e:
- raise HTTPException(status_code=400, detail=str(e))
- return opml_data
- else:
- raise HTTPException(status_code=403,
- detail="You can only make backups for yourself!")
-
-
-class BackupServerRequest(BaseModel):
- database_pass: str
-
-@app.post("/api/data/backup_server", response_class=PlainTextResponse)
-async def backup_server(request: BackupServerRequest, is_admin: bool = Depends(check_if_admin), cnx=Depends(get_database_connection)):
- # logging.info(f"request: {request}")
- if not is_admin:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Not authorized")
- try:
- dump_data = database_functions.functions.backup_server(database_type, cnx, request.database_pass)
- except Exception as e:
- raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
- return Response(content=dump_data, media_type="text/plain")
-
-@app.post("/api/data/restore_server")
-async def api_restore_server(
- background_tasks: BackgroundTasks,
- backup_file: UploadFile,
- database_pass: str = Form(...),
- is_admin: bool = Depends(check_if_admin),
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- if not is_admin:
- raise HTTPException(status_code=403, detail="Not authorized")
-
- if not backup_file.filename.endswith('.sql'):
- raise HTTPException(status_code=400, detail="Invalid file type. Only .sql files are allowed")
-
- file_content = await backup_file.read()
- if len(file_content) > 100 * 1024 * 1024: # 100MB limit
- raise HTTPException(status_code=413, detail="File too large")
-
- logging.info(f"Restoring server with uploaded backup file")
- background_tasks.add_task(restore_server_fun, database_pass, file_content)
- return JSONResponse(content={"detail": "Server restoration started."})
-
-def restore_server_fun(database_pass: str, server_restore_data: str):
- # Assuming create_database_connection and restore_server are defined in database_functions.functions
- cnx = create_database_connection() # Replace with your method to create a new DB connection
- try:
- # Restore server using the provided password and data
- database_functions.functions.restore_server(cnx, database_pass, server_restore_data)
- finally:
- cnx.close()
-
-@app.get("/api/data/rss_feed_status")
-async def get_rss_feed_status(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Get RSS feed enabled status for current user"""
- try:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- print(f'user_id for rss: {key_id}')
- if not key_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- status = database_functions.functions.get_rss_feed_status(cnx, database_type, key_id)
- print(status)
- return status
-
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/api/data/toggle_rss_feeds")
-async def toggle_rss_feeds_endpoint(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Toggle RSS feed status for current user"""
- try:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not key_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
- new_status = database_functions.functions.toggle_rss_feeds(cnx, database_type, key_id)
- return {"success": True, "enabled": new_status}
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-@app.get("/api/feed/{user_id}")
-async def get_user_feed(
- request: Request,
- user_id: int,
- api_key: str, # Now a query parameter
- limit: int = 1000,
- podcast_id: Optional[int] = None,
- source_type: str = Query(None, alias='type'),
- cnx=Depends(get_database_connection)
-):
- """Get RSS feed for all podcasts or a specific podcast"""
- print(f'user: {user_id}, api: {api_key}')
- print(f'podcast_id parameter: {podcast_id}, type: {type(podcast_id)}')
- print(f'podcast_id_list will be: {[podcast_id] if podcast_id is not None else None}')
- try:
- domain = os.getenv('HOSTNAME', f'{request.url.scheme}://{request.url.hostname}:{request.url.port or 80}')
-
-
- # Convert single podcast_id to list format if provided
- podcast_id_list = [podcast_id] if podcast_id is not None else None
-
- rss_key = database_functions.functions.get_rss_key_if_valid(cnx, database_type, api_key, podcast_id_list)
-
- # TODO: remove this once backwards compatibility is no longer needed
- if not rss_key:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not key_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
- rss_key = {
- "podcast_ids": [ -1 ],
- "user_id": key_id,
- "key": api_key
- }
-
- feed_content = database_functions.functions.generate_podcast_rss(
- database_type,
- cnx,
- rss_key,
- limit,
- source_type,
- domain,
- podcast_id=podcast_id_list
- )
- return Response(
- content=feed_content,
- media_type="application/rss+xml"
- )
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-@app.post("/api/data/rss_feed_status/{user_id}")
-async def toggle_rss_feeds(
- user_id: int,
- enable: bool,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Enable or disable RSS feeds for a user"""
- try:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not key_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- new_status = database_functions.functions.set_rss_feed_status(cnx, database_type, user_id, enable)
- return {"status": "success", "enabled": new_status}
-
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-
-@app.get("/api/data/rss_key")
-async def get_user_rss_key(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Get the RSS key for the current user"""
- try:
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not key_id:
- raise HTTPException(status_code=403, detail="Invalid API key")
-
- rss_key = database_functions.functions.get_user_rss_key(cnx, database_type, key_id)
- if not rss_key:
- raise HTTPException(status_code=404, detail="No RSS key found. Please enable RSS feeds first.")
-
- return {"rss_key": rss_key}
-
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-
-class YouTubeChannel(BaseModel):
- channel_id: str
- name: str
- description: str
- subscriber_count: Optional[int]
- url: str
- video_count: Optional[int]
- thumbnail_url: Optional[str]
- recent_videos: List[dict] = []
-
- class Config:
- json_encoders = {
- list: lambda v: v # Preserve lists during JSON encoding
- }
-
-@app.get("/api/data/search_youtube_channels")
-async def search_youtube_channels(
- query: str,
- max_results: int = 5,
- user_id: int = None,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- # Validate API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if web key and verify user permission
- is_web_key = api_key == base_webkey.web_key
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if not (key_id == user_id or is_web_key):
- raise HTTPException(
- status_code=403,
- detail="You can only search with your own account."
- )
-
- try:
- # First get channel ID using a search
- search_url = f"ytsearch{max_results*4}:{query}"
-
- ydl_opts = {
- 'quiet': True,
- 'extract_flat': True,
- 'no_warnings': True,
- 'skip_download': True,
- 'extract_info': True,
- }
-
- with YoutubeDL(ydl_opts) as ydl:
- logging.info(f"Searching YouTube with query: {query}")
- results = ydl.extract_info(search_url, download=False)
-
- if not results or 'entries' not in results:
- return {"results": []}
-
- processed_results = []
- seen_channels = set() # Track unique channels
- channel_videos = {}
-
- for entry in results.get('entries', []):
- try:
- channel_id = entry.get('channel_id') or entry.get('uploader_id')
- if not channel_id:
- continue
-
- # First collect the video regardless of whether we've seen the channel
- if channel_id not in channel_videos:
- channel_videos[channel_id] = []
- if len(channel_videos[channel_id]) < 3: # Limit to 3 videos
- channel_videos[channel_id].append({
- 'id': entry.get('id', ''),
- 'title': entry.get('title', ''),
- 'duration': entry.get('duration'),
- 'url': f"https://www.youtube.com/watch?v={entry.get('id')}"
- })
- print(f"Added video to channel {channel_id}, now has {len(channel_videos[channel_id])} videos")
-
-
- # Now check if we've already processed this channel
- if channel_id in seen_channels:
- continue
-
- seen_channels.add(channel_id)
-
- # Get minimal channel info
- channel_opts = ydl_opts.copy()
- channel_opts['extract_flat'] = True
- channel_opts['process'] = False
-
- channel_url = f"https://www.youtube.com/channel/{channel_id}"
- channel_info = ydl.extract_info(
- channel_url,
- download=False,
- process=False # Don't process more than necessary
- )
-
- # Get avatar URL from channel info
- thumbnail_url = None
- if channel_info and channel_info.get('thumbnails'):
- # Try to find avatar-specific thumbnails first
- avatar_thumbnails = [t for t in channel_info['thumbnails']
- if t.get('id', '').startswith('avatar')]
-
- if avatar_thumbnails:
- # Get the largest avatar thumbnail
- thumbnail_url = avatar_thumbnails[-1]['url']
- else:
- # Fallback: try to find any thumbnail with "avatar" in the URL
- avatar_thumbnails = [t for t in channel_info['thumbnails']
- if 'avatar' in t.get('url', '').lower()]
- if avatar_thumbnails:
- thumbnail_url = avatar_thumbnails[-1]['url']
- else:
- # Last resort: use the first thumbnail
- thumbnail_url = channel_info['thumbnails'][0]['url']
- print(f"Creating channel {channel_id} with {len(channel_videos[channel_id])} videos")
- channel = YouTubeChannel(
- channel_id=channel_id,
- name=entry.get('channel', '') or entry.get('uploader', ''),
- description=entry.get('description', '')[:500] if entry.get('description') else '',
- subscriber_count=None,
- url=f"https://www.youtube.com/channel/{channel_id}",
- video_count=None,
- thumbnail_url=thumbnail_url or entry.get('channel_thumbnail', ''),
- recent_videos=channel_videos[channel_id] # <-- Use our collected videos here
- )
-
- if len(processed_results) < max_results:
- channel_dict = channel.dict()
- channel_dict['recent_videos'] = channel_videos[channel_id] # Explicitly set after dict conversion
- processed_results.append(channel_dict)
- else:
- break
-
- except Exception as entry_error:
- logging.error(f"Error processing channel entry: {entry_error}")
- continue
-
- logging.info(f"Found {len(processed_results)} channels")
- return {"results": processed_results}
-
- except Exception as e:
- logging.error(f"YouTube channel search error: {str(e)}")
- raise HTTPException(
- status_code=500,
- detail=f"Error searching YouTube channels: {str(e)}"
- )
-
-def process_youtube_channel(podcast_id: int, channel_id: str, feed_cutoff: int):
- cnx = create_database_connection()
- try:
- database_functions.youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff)
- finally:
- close_database_connection(cnx)
-
-@app.post("/api/data/youtube/subscribe")
-async def subscribe_to_youtube_channel(
- channel_id: str,
- user_id: int,
- background_tasks: BackgroundTasks,
- feed_cutoff: int = 30,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Subscribe to a YouTube channel"""
- import logging
- logger = logging.getLogger(__name__)
-
- try:
- logger.info(f"Starting subscription for channel {channel_id}")
-
- existing_id = database_functions.functions.check_existing_channel_subscription(cnx, database_type, channel_id, user_id)
- if existing_id:
- logger.info(f"Channel {channel_id} already subscribed")
- return {
- "success": True,
- "podcast_id": existing_id,
- "message": "Already subscribed to this channel"
- }
-
- logger.info("Getting channel info")
- channel_info = await database_functions.youtube.get_channel_info(channel_id)
-
- logger.info("Adding channel to database")
- podcast_id = database_functions.functions.add_youtube_channel(cnx, database_type, channel_info, user_id, feed_cutoff)
-
- logger.info(f"Starting background task for podcast_id {podcast_id}")
- background_tasks.add_task(process_youtube_channel, podcast_id, channel_id, feed_cutoff)
-
- logger.info("Subscription completed successfully")
- return {
- "success": True,
- "podcast_id": podcast_id,
- "message": "Channel subscription initiated. Videos will be processed in background."
- }
- except Exception as e:
- logger.error(f"Error subscribing to channel: {str(e)}", exc_info=True)
- raise HTTPException(
- status_code=500,
- detail=f"Error subscribing to channel: {str(e)}"
- )
-
-@app.post("/api/auth/store_state")
-async def store_oidc_state(
- request: Request,
-):
- try:
- data = await request.json()
- state = data.get('state')
- client_id = data.get('client_id')
-
- if not state or not client_id:
- raise HTTPException(status_code=400, detail="Missing state or client_id")
-
- success = database_functions.oidc_state_manager.oidc_state_manager.store_state(state, client_id)
- if not success:
- raise HTTPException(status_code=500, detail="Failed to store state")
-
- return {"status": "success"}
- except Exception as e:
- logging.error(f"Error storing OIDC state: {str(e)}")
- raise HTTPException(status_code=500, detail="Failed to store state")
-
-@app.get("/api/auth/callback")
-async def oidc_callback(
- request: Request,
- code: str,
- state: str = None,
- cnx=Depends(get_database_connection)
-):
- try:
- base_url = str(request.base_url)[:-1]
- # Force HTTPS if running in production
- if not base_url.startswith('http://localhost'):
- if base_url.startswith('http:'):
- base_url = 'https:' + base_url[5:]
-
- print(f"Base URL: {base_url}")
- frontend_base = base_url.replace('/api', '')
-
- # Get client_id from query parameters
- client_id = database_functions.oidc_state_manager.oidc_state_manager.get_client_id(state)
- if not client_id:
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=invalid_state"
- )
-
- registered_redirect_uri = f"{base_url}/api/auth/callback"
- print(f"Using redirect_uri: {registered_redirect_uri}")
-
- # Get OIDC provider details
- provider = database_functions.functions.get_oidc_provider(cnx, database_type, client_id)
- if not provider:
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=invalid_provider"
- )
-
- # Unpack provider details
- provider_id, client_id, client_secret, token_url, userinfo_url, name_claim, email_claim, username_claim, roles_claim, user_role, admin_role = provider
-
- # Exchange authorization code for access token
- async with httpx.AsyncClient() as client:
- try:
- token_response = await client.post(
- token_url,
- data={
- "grant_type": "authorization_code",
- "code": code,
- "redirect_uri": registered_redirect_uri,
- "client_id": client_id,
- "client_secret": client_secret,
- },
- headers={
- "Accept": "application/json"
- }
- )
-
- if token_response.status_code != 200:
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=token_exchange_failed"
- )
-
- token_data = token_response.json()
- print(f"Token response: {token_data}")
- access_token = token_data.get("access_token")
-
- # Get user info from OIDC provider
- headers = {
- "Authorization": f"Bearer {access_token}",
- "User-Agent": "PinePods/1.0", # Add a meaningful user agent
- "Accept": "application/json"
- }
- userinfo_response = await client.get(userinfo_url, headers=headers)
-
- if userinfo_response.status_code != 200:
- error_content = userinfo_response.text
- print(f"GitHub API error: {error_content}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=userinfo_failed"
- )
-
- user_info = userinfo_response.json()
- print(f"User info response: {user_info}")
- email = user_info.get(email_claim or "email")
-
- parsed_url = urlparse(userinfo_url)
- if not email and parsed_url.hostname == 'api.github.com':
- # For GitHub, we may need to make a separate request for emails
- # because GitHub doesn't include email in user info if it's private
- emails_response = await client.get(
- 'https://api.github.com/user/emails',
- headers=headers
- )
-
- if emails_response.status_code == 200:
- emails = emails_response.json()
- # Find the primary email
- for email_obj in emails:
- if email_obj.get('primary') and email_obj.get('verified'):
- email = email_obj.get('email')
- break
-
- # If no primary found, take the first verified one
- if not email:
- for email_obj in emails:
- if email_obj.get('verified'):
- email = email_obj.get('email')
- break
-
- if not email:
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=email_required"
- )
-
- except httpx.RequestError:
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=network_error"
- )
-
- # Verify access.
- if roles_claim and user_role:
- roles = user_info.get(roles_claim)
- if not isinstance(roles, list):
- print(f'Claim {roles_claim} should be a list of strings, but it is {roles}.')
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=no_access&details=invalid_roles"
- )
- if user_role not in roles and not (admin_role and admin_role in roles):
- print(f"User user role {user_role} {f'and admin role {admin_role}' if admin_role else ''} not in user's roles ({roles}), denying access.")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=no_access"
- )
-
- # Check if user exists
- user = database_functions.functions.get_user_by_email(cnx, database_type, email)
-
- # In your OIDC callback function, replace the user creation section with:
-
- # Determine the user's information
- fullname = user_info.get(name_claim or "name", "")
- if username_claim and username_claim not in user_info:
- print(f"Unable to determine username for user, username claim {username_claim} not present")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details=username_claim_missing"
- )
- username = user_info.get(username_claim or "preferred_username")
-
- if not user:
- # Create new user
- print(f"User with email {email} not found, creating new user")
-
- if username is None:
- username = email.split("@")[0].lower()
- base_username = username
- counter = 1
- max_attempts = 10
-
- while counter <= max_attempts:
- try:
- print(f"Attempt {counter} to create user with base username: {base_username}")
- user_id = database_functions.functions.create_oidc_user(
- cnx, database_type, email, fullname, username
- )
- print(f"User created successfully with ID: {user_id}")
-
- if not user_id:
- print(f"ERROR: Invalid user_id returned: {user_id}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=invalid_user_id"
- )
-
- print(f"Creating API key for user_id: {user_id}")
- api_key = database_functions.functions.create_api_key(cnx, database_type, user_id)
- print(f"API key created: {api_key[:5]}... (truncated for security)")
- break
- except UniqueViolation:
- print(f"Username conflict with {username}, trying next variation")
- username = f"{base_username}{counter}"
- counter += 1
- if counter > max_attempts:
- print(f"Failed to create user after {max_attempts} attempts due to username conflicts")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=username_conflict"
- )
- except Exception as e:
- print(f"Error during user creation: {str(e)}")
- import traceback
- print(f"Traceback: {traceback.format_exc()}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details={str(e)[:50]}"
- )
- else:
- print("Failed to create user after maximum attempts")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=user_creation_failed"
- )
-
- else:
- try:
- print(f"Attempt to create user with username: {username}")
- user_id = database_functions.functions.create_oidc_user(
- cnx, database_type, email, fullname, username
- )
- print(f"User created successfully with ID: {user_id}")
-
- if not user_id:
- print(f"ERROR: Invalid user_id returned: {user_id}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=invalid_user_id"
- )
-
- print(f"Creating API key for user_id: {user_id}")
- api_key = database_functions.functions.create_api_key(cnx, database_type, user_id)
- print(f"API key created: {api_key[:5]}... (truncated for security)")
- except UniqueViolation:
- print("Failed to create user due to username conflicts")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=username_conflict"
- )
- except Exception as e:
- print(f"Error during user creation: {str(e)}")
- import traceback
- print(f"Traceback: {traceback.format_exc()}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=user_creation_failed&details={str(e)[:50]}"
- )
-
- else:
- # Existing user - retrieve their API key
- print(f"User with email {email} found, retrieving API key")
- user_id = user[0] if isinstance(user, tuple) else user['userid'] # Adjust based on your DB return format
-
- api_key = database_functions.functions.get_user_api_key(cnx, database_type, user_id)
- if not api_key:
- print(f"No API key found for user_id: {user_id}, creating a new one")
- api_key = database_functions.functions.create_api_key(cnx, database_type, user_id)
-
- print(f"API key retrieved: {api_key[:5]}... (truncated for security)")
-
- # Update user info based on OIDC information.
- database_functions.functions.set_fullname(cnx, database_type, user_id, fullname)
-
- current_username = user[2] if isinstance(user, tuple) else user['username']
- if username_claim and username != current_username:
- if database_functions.functions.check_usernames(cnx, database_type, username):
- print(f'Unable to update username for user {user_id} to match the username specified by the OIDC provider ({username}) as this is already in use by another user.')
- else:
- database_functions.functions.set_username(cnx, database_type, user_id, username)
-
- # Update admin role based on OIDC roles.
- if roles_claim and admin_role:
- roles = user_info.get(roles_claim)
- if not isinstance(roles, list):
- print(f'Claim {roles_claim} should be a list of strings, but it is {roles}.')
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=no_access&details=invalid_roles"
- )
- database_functions.functions.set_isadmin(cnx, database_type, user_id, admin_role in roles)
-
- # Success case - redirect with API key
- return RedirectResponse(url=f"{frontend_base}/oauth/callback?api_key={api_key}")
-
- except Exception as e:
- logging.error(f"OIDC callback error: {str(e)}")
- return RedirectResponse(
- url=f"{frontend_base}/oauth/callback?error=authentication_failed"
- )
-
-# Store active connections
-class ConnectionManager:
- def __init__(self):
- # Map of user_id to list of websocket connections
- self.active_connections: Dict[int, List[WebSocket]] = {}
-
- async def connect(self, websocket: WebSocket, user_id: int):
- await websocket.accept()
- if user_id not in self.active_connections:
- self.active_connections[user_id] = []
- self.active_connections[user_id].append(websocket)
-
- def disconnect(self, websocket: WebSocket, user_id: int):
- if user_id in self.active_connections:
- if websocket in self.active_connections[user_id]:
- self.active_connections[user_id].remove(websocket)
- if not self.active_connections[user_id]:
- del self.active_connections[user_id]
-
- async def broadcast_to_user(self, user_id: int, message: Dict[str, Any]):
- if user_id in self.active_connections:
- # Convert to JSON once for efficiency
- json_message = json.dumps(message)
- disconnected = []
-
- # Send to all connections for this user
- for websocket in self.active_connections[user_id]:
- try:
- await websocket.send_text(json_message)
- except Exception:
- disconnected.append(websocket)
-
- # Clean up any failed connections
- for websocket in disconnected:
- self.disconnect(websocket, user_id)
-
-# Initialize connection manager
-manager = ConnectionManager()
-
-# Define the broadcast message model
-class BroadcastMessage(BaseModel):
- user_id: int
- message: Dict[str, Any]
-
-@app.post("/api/tasks/broadcast")
-async def broadcast_task_update(
- data: BroadcastMessage,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Endpoint to broadcast a task update to a user via WebSocket"""
-
- # Verify API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if manager has the user in active connections
- user_id = data.user_id
- has_connections = user_id in manager.active_connections
- print(f"Broadcasting to user {user_id}, has connections: {has_connections}")
-
- if has_connections:
- # Broadcast the message
- await manager.broadcast_to_user(user_id, data.message)
- return {"success": True, "message": f"Broadcast sent to user {user_id}"}
- else:
- print(f"No active connections for user {user_id}")
- return {"success": False, "message": f"No active connections for user {user_id}"}
-
-# Model for task query parameters
-class TaskQueryParams(BaseModel):
- user_id: int
-
-# Extract API key from WebSocket query parameters
-async def get_api_key_from_websocket(websocket: WebSocket) -> str:
- query_params = websocket.query_params
- api_key = query_params.get("api_key")
-
- if not api_key:
- raise ValueError("API key is required")
-
- return api_key
-
-@app.get("/api/tasks/active")
-async def get_active_tasks(
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- # Verify API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if user has permission to access these tasks
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- is_web_key = api_key == base_webkey.web_key
-
- if key_id != user_id and not is_web_key:
- raise HTTPException(
- status_code=403,
- detail="You can only view your own tasks"
- )
-
- # Get all active tasks for the user - this needs to be expanded
- # to include all types of tasks, not just downloads
- active_tasks = database_functions.tasks.get_all_active_tasks(user_id)
-
- return {"tasks": active_tasks}
-
-# Add this DEBUG logging to your FastAPI WebSocket endpoint in clientapi.py
-@app.websocket("/ws/api/tasks/{user_id}")
-async def websocket_endpoint(
- websocket: WebSocket,
- user_id: int,
- cnx=Depends(get_database_connection)
-):
- print(f"WebSocket connection request received for user {user_id}")
- # Get API key from websocket query params
- try:
- api_key = await get_api_key_from_websocket(websocket)
- print(f"WebSocket API key validated for user {user_id}")
-
- # Verify API key
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- print(f"Invalid API key for WebSocket connection, user {user_id}")
- await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
- return
-
- # Check if user has permission
- key_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- is_web_key = api_key == base_webkey.web_key
-
- if key_id != user_id and not is_web_key:
- print(f"Permission denied for WebSocket connection, user {user_id}")
- await websocket.close(code=status.WS_1008_POLICY_VIOLATION)
- return
-
- # Accept the connection
- await manager.connect(websocket, user_id)
- print(f"WebSocket connection accepted for user {user_id}")
-
- # Send initial task list with all types of tasks
- active_tasks = database_functions.tasks.get_all_active_tasks(user_id)
- print(f"Found {len(active_tasks)} active tasks for user {user_id}")
- await websocket.send_text(json.dumps({
- "event": "initial",
- "tasks": active_tasks
- }))
-
- # Keep connection alive and handle messages
- try:
- while True:
- # Handle any incoming messages (client might request refresh)
- message = await websocket.receive_text()
- data = json.loads(message)
- print(f"Received WebSocket message from user {user_id}: {data}")
-
- if data.get("action") == "refresh":
- # Send updated task list with all tasks
- active_tasks = database_functions.tasks.get_all_active_tasks(user_id)
- await websocket.send_text(json.dumps({
- "event": "refresh",
- "tasks": active_tasks
- }))
-
- # Wait a short while before next iteration
- await asyncio.sleep(0.1)
-
- except WebSocketDisconnect:
- print(f"WebSocket disconnected for user {user_id}")
- manager.disconnect(websocket, user_id)
-
- except Exception as e:
- print(f"WebSocket error for user {user_id}: {str(e)}")
- try:
- await websocket.close(code=status.WS_1011_INTERNAL_ERROR)
- except:
- pass
-
-
-class InitRequest(BaseModel):
- api_key: str
-
-@app.post("/api/init/startup_tasks")
-async def run_startup_tasks(request: InitRequest, cnx=Depends(get_database_connection)):
- try:
- print('start of startup')
- # Verify if the API key is valid
- is_valid = database_functions.functions.verify_api_key(cnx, database_type, request.api_key)
- web_key = database_functions.functions.get_web_key(cnx, database_type)
- # Check if the provided API key is the web key
- is_web_key = request.api_key == web_key
-
- if not is_valid or not is_web_key:
- raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail="Invalid or unauthorized API key")
-
- # Execute the startup tasks
- database_functions.functions.add_news_feed_if_not_added(database_type, cnx)
- return {"status": "Startup tasks completed successfully."}
-
- database_functions.valkey_client.connect()
- except Exception as e:
- logger.error(f"Error in startup tasks: {e}")
- raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Failed to complete startup tasks")
- finally:
- # The connection will automatically be closed by FastAPI's dependency system
- pass
-
-async def async_tasks():
- # Start cleanup task
- logging.info("Starting cleanup tasks")
- asyncio.create_task(cleanup_temp_mfa_secrets())
-
-
-if __name__ == '__main__':
- raw_debug_mode = os.environ.get("DEBUG_MODE", "False")
- DEBUG_MODE = raw_debug_mode.lower() == "true"
- if DEBUG_MODE:
- logging.info("Debug Mode Enabled")
- else:
- logging.info("Debug Mode Disabled")
- config_file = "/pinepods/startup/logging_config_debug.ini" if DEBUG_MODE else "/pinepods/startup/logging_config.ini"
- logging.info(config_file)
- parser = argparse.ArgumentParser()
- parser.add_argument('--port', type=int, default=8032, help='Port to run the server on')
- args = parser.parse_args()
- asyncio.run(async_tasks())
-
- import uvicorn
-
- uvicorn.run(
- "clientapi:app",
- host="0.0.0.0",
- port=args.port,
- log_config=config_file,
- limit_concurrency=1000,
- )
diff --git a/completed_todos.md b/completed_todos.md
index 190a2690..9346c606 100644
--- a/completed_todos.md
+++ b/completed_todos.md
@@ -13,6 +13,32 @@ Major Version:
- [ ] Fix episode spacing on queue page. The context button still shows even on smallest screens
- [ ] Check youtube download Issues when changing the download time
+0.8.2
+
+- [x] Translations on the web app
+- [x] Account Settings now updates dropdowns with pre-populated values
+- [x] episode-layout (podcast page) will now set sort settings based on pod id
+- [x] Added endpoint to delete OIDC settings
+- [x] Added endpoint to Edit OIDC settings
+- [x] Manually search or enter podcast index id for matching to podcast index
+- [x] OIDC Setup on start
+- [x] Better errors if needed vars are missing
+- [x] Redis/Valkey Authentication
+- [x] Move Episode Addition process to the background when adding a podcast
+- [x] Support HTTP request notifications. Will work with Telegram and quite a few other basic HTTP notification platforms
+- [x] Podcast Merge Options
+- [x] Individual Episode download on /episode page
+- [x] Option to use Podcast covers if desired
+- [x] Fix issue where release date on podcasts not added shows as current date/time
+- [x] Fix yt-dlp issues
+
+- [x] Fixed gPodder completion bug where an episode whose played length exactly matched the episode length would not be marked complete
+- [x] Fixed issue with auto complete threshold. Will now mark historical episodes complete when enabled
+- [x] Some sort of loading indicator for the single ep download
+- [x] Fix issue where duplicate episodes were created if details of the episode were updated
+- [x] Fully dynamic Playlist implementation
+- [x] Checking on RSS feeds returning downloaded URLs correctly
+
0.7.9
- [x] Finish implementing long finger press - fix on iOS (close, it doesn't auto close when clicking away currently)
diff --git a/database_functions/app_functions.py b/database_functions/app_functions.py
deleted file mode 100644
index 82c56678..00000000
--- a/database_functions/app_functions.py
+++ /dev/null
@@ -1,289 +0,0 @@
-from typing import Optional
-
-def send_email(server_name, server_port, from_email, to_email, send_mode, encryption, auth_required, username, password, subject, body):
- import smtplib
- from email.mime.multipart import MIMEMultipart
- from email.mime.text import MIMEText
- import ssl
- import socket
-
- try:
- if send_mode == "SMTP":
- # Set up the SMTP server.
- if encryption == "SSL/TLS":
- smtp = smtplib.SMTP_SSL(server_name, server_port, timeout=10)
- elif encryption == "STARTTLS":
- smtp = smtplib.SMTP(server_name, server_port, timeout=10)
- smtp.starttls()
- else: # No encryption
- smtp = smtplib.SMTP(server_name, server_port, timeout=10)
-
-
- # Authenticate if needed.
- if auth_required:
- try: # Trying to login and catching specific SMTPNotSupportedError
- smtp.login(username, password)
- except smtplib.SMTPNotSupportedError:
- return 'SMTP AUTH extension not supported by server.'
-
- # Create a message.
- msg = MIMEMultipart()
- msg['From'] = from_email
- msg['To'] = to_email
- msg['Subject'] = subject
- msg.attach(MIMEText(body, 'plain'))
-
- # Send the message.
- smtp.send_message(msg)
- smtp.quit()
- return 'Email sent successfully.'
-
- elif send_mode == "Sendmail":
- pass
- except ssl.SSLError:
- return 'SSL Wrong Version Number. Try another ssl type?'
- except smtplib.SMTPAuthenticationError:
- return 'Authentication Error: Invalid username or password.'
- except smtplib.SMTPRecipientsRefused:
- return 'Recipients Refused: Email address is not accepted by the server.'
- except smtplib.SMTPSenderRefused:
- return 'Sender Refused: Sender address is not accepted by the server.'
- except smtplib.SMTPDataError:
- return 'Unexpected server response: Possibly the message data was rejected by the server.'
- except socket.gaierror:
- return 'Server Not Found: Please check your server settings.'
- except ConnectionRefusedError:
- return 'Connection Refused: The server refused the connection.'
- except TimeoutError:
- return 'Timeout Error: The connection to the server timed out.'
- except smtplib.SMTPException as e:
- return f'Failed to send email: {str(e)}'
-
-
-
-def sync_with_nextcloud(nextcloud_url, nextcloud_token):
- print("Starting Nextcloud Sync")
-
- headers = {
- "Authorization": f"Bearer {nextcloud_token}",
- "Content-Type": "application/json"
- }
-
- # Sync Subscriptions
- sync_subscriptions(nextcloud_url, headers)
-
- # Sync Episode Actions
- sync_episode_actions(nextcloud_url, headers)
-
-
-def sync_subscriptions(nextcloud_url, headers, user_id):
- import requests
- # Implement fetching and updating subscriptions
- # Example GET request to fetch subscriptions
- response = requests.get(f"{nextcloud_url}/index.php/apps/gpoddersync/subscriptions", headers=headers)
- # Handle the response
- print(response.json())
-
-
-def sync_subscription_change(nextcloud_url, headers, add, remove):
- import requests
- payload = {
- "add": add,
- "remove": remove
- }
- response = requests.post(f"{nextcloud_url}/index.php/apps/gpoddersync/subscription_change/create", json=payload,
- headers=headers)
-
-def sync_subscription_change_gpodder(gpodder_url, gpodder_login, auth, add, remove):
- import requests
- payload = {
- "add": add,
- "remove": remove
- }
- response = requests.post(f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/default.json", json=payload, auth=auth)
- response.raise_for_status()
- print(f"Subscription changes synced with gPodder: {response.text}")
-
-
-def sync_subscription_change_gpodder_session(session, gpodder_url, gpodder_login, add, remove):
- """Sync subscription changes using session-based authentication"""
- import logging
-
- logger = logging.getLogger(__name__)
-
- payload = {
- "add": add,
- "remove": remove
- }
-
- try:
- response = session.post(
- f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/default.json",
- json=payload
- )
- response.raise_for_status()
- logger.info(f"Subscription changes synced with gPodder using session: {response.text}")
- return True
- except Exception as e:
- logger.error(f"Error syncing subscription changes with session: {str(e)}")
- return False
-
-def sync_episode_actions(nextcloud_url, headers):
- print('test')
- # Implement fetching and creating episode actions
- # Similar to the sync_subscriptions method
-
-def get_podcast_values(feed_url, user_id, username: Optional[str] = None, password: Optional[str] = None, display_only: bool = False):
- import feedparser
- import json
- import requests
- from requests.auth import HTTPBasicAuth
-
- # Use requests to fetch the feed content
- try:
- # Simpler headers that worked in the original version
- headers = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3',
- 'Accept-Language': 'en-US,en;q=0.9',
- }
- print(f"Fetching URL: {feed_url}")
-
- if username and password:
- print(f"Using auth for user: {username}")
- response = requests.get(feed_url, headers=headers, auth=HTTPBasicAuth(username, password))
- else:
- response = requests.get(feed_url, headers=headers)
-
- response.raise_for_status()
- # Use binary content which worked in the original version
- feed_content = response.content
-
- except requests.RequestException as e:
- try:
- if 'response' in locals():
- print(f"Response headers: {response.headers}")
- print(f"Response content: {response.content[:500]}")
- except:
- pass
- raise ValueError(f"Error fetching the feed: {str(e)}")
-
- # Parse the feed
- d = feedparser.parse(feed_content)
- print(f"Feed parsed - title: {d.feed.get('title', 'Unknown')}")
-
- # Initialize podcast_values as in the original version that worked
- podcast_values = {
- 'pod_title': d.feed.title if hasattr(d.feed, 'title') else None,
- 'pod_artwork': None, # We'll set this with multiple checks below
- 'pod_author': d.feed.author if hasattr(d.feed, 'author') else None,
- 'categories': [],
- 'pod_description': d.feed.description if hasattr(d.feed, 'description') else None,
- 'pod_episode_count': len(d.entries) if display_only else 0,
- 'pod_feed_url': feed_url,
- 'pod_website': d.feed.link if hasattr(d.feed, 'link') else None,
- 'pod_explicit': False,
- 'user_id': user_id
- }
-
- # Enhanced image URL extraction combining both approaches
- if hasattr(d.feed, 'image'):
- if hasattr(d.feed.image, 'href'):
- podcast_values['pod_artwork'] = d.feed.image.href
- elif hasattr(d.feed.image, 'url'): # Added for news feed format
- podcast_values['pod_artwork'] = d.feed.image.url
- elif isinstance(d.feed.image, dict):
- if 'href' in d.feed.image:
- podcast_values['pod_artwork'] = d.feed.image['href']
- elif 'url' in d.feed.image:
- podcast_values['pod_artwork'] = d.feed.image['url']
-
- # iTunes image fallback
- if not podcast_values['pod_artwork'] and hasattr(d.feed, 'itunes_image'):
- if hasattr(d.feed.itunes_image, 'href'):
- podcast_values['pod_artwork'] = d.feed.itunes_image.href
- elif isinstance(d.feed.itunes_image, dict) and 'href' in d.feed.itunes_image:
- podcast_values['pod_artwork'] = d.feed.itunes_image['href']
-
- # Author fallback
- if not podcast_values['pod_author'] and hasattr(d.feed, 'itunes_author'):
- podcast_values['pod_author'] = d.feed.itunes_author
-
- # Description fallbacks
- if not podcast_values['pod_description']:
- if hasattr(d.feed, 'subtitle'):
- podcast_values['pod_description'] = d.feed.subtitle
- elif hasattr(d.feed, 'itunes_summary'):
- podcast_values['pod_description'] = d.feed.itunes_summary
-
- # Category extraction with robust error handling
- try:
- if hasattr(d.feed, 'itunes_category'):
- if isinstance(d.feed.itunes_category, list):
- for cat in d.feed.itunes_category:
- if isinstance(cat, dict) and 'text' in cat:
- podcast_values['categories'].append(cat['text'])
- elif hasattr(cat, 'text'):
- podcast_values['categories'].append(cat.text)
- elif isinstance(d.feed.itunes_category, dict) and 'text' in d.feed.itunes_category:
- podcast_values['categories'].append(d.feed.itunes_category['text'])
- except Exception as e:
- print(f"Error extracting categories: {e}")
-
- # Handle empty categories
- if not podcast_values['categories']:
- podcast_values['categories'] = {'1': 'Podcasts'} # Default category
- else:
- categories_dict = {str(i): cat for i, cat in enumerate(podcast_values['categories'], start=1)}
- podcast_values['categories'] = categories_dict
-
- # Add explicit check with robust handling
- try:
- if hasattr(d.feed, 'itunes_explicit'):
- if isinstance(d.feed.itunes_explicit, str):
- podcast_values['pod_explicit'] = d.feed.itunes_explicit.lower() in ('yes', 'true', '1')
- elif isinstance(d.feed.itunes_explicit, bool):
- podcast_values['pod_explicit'] = d.feed.itunes_explicit
- except Exception as e:
- print(f"Error checking explicit flag: {e}")
-
- # Print values for debugging
- print("Extracted podcast values:")
- for key, value in podcast_values.items():
- print(f"{key}: {value}")
-
- return podcast_values
-
-
-
-def check_valid_feed(feed_url: str, username: Optional[str] = None, password: Optional[str] = None):
- """
- Check if the provided URL points to a valid podcast feed.
- Raises ValueError if the feed is invalid.
- """
- import feedparser
- import requests
- # Use requests to fetch the feed content
- try:
- if username and password:
- response = requests.get(feed_url, auth=(username, password))
- else:
- response = requests.get(feed_url)
-
- response.raise_for_status() # Raise an exception for HTTP errors
- feed_content = response.content
- except requests.RequestException as e:
- raise ValueError(f"Error fetching the feed: {str(e)}")
-
- # Parse the feed
- parsed_feed = feedparser.parse(feed_content)
-
- # Check for basic RSS or Atom feed structure
- if not parsed_feed.get('version'):
- raise ValueError("Invalid podcast feed URL or content.")
-
- # Check for essential elements in the feed
- if not ('title' in parsed_feed.feed and 'link' in parsed_feed.feed and 'description' in parsed_feed.feed):
- raise ValueError("Feed missing required attributes: title, link, or description.")
-
- # If it passes the above checks, it's likely a valid feed
- return parsed_feed
diff --git a/database_functions/auth_functions.py b/database_functions/auth_functions.py
deleted file mode 100644
index 43367061..00000000
--- a/database_functions/auth_functions.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from passlib.context import CryptContext
-
-# Create a Passlib context for Argon2
-pwd_context = CryptContext(schemes=["argon2"], deprecated="auto")
-
-def hash_password(password: str):
- # Use the Passlib context to hash the password
- hashed_password = pwd_context.hash(password)
- return hashed_password
-
-def verify_password(cnx, database_type, username: str, password: str) -> bool:
- print("preparing pw check")
- if database_type == "postgresql":
- cursor = cnx.cursor()
- cursor.execute('SELECT Hashed_PW FROM "Users" WHERE Username = %s', (username,))
- else: # MySQL or MariaDB
- cursor = cnx.cursor(buffered=True)
- cursor.execute("SELECT Hashed_PW FROM Users WHERE Username = %s", (username,))
-
- result = cursor.fetchone()
- cursor.close()
- print("ran pw get")
-
- if not result:
- print("User not found")
- return False # User not found
-
- stored_hashed_password = result[0] if isinstance(result, tuple) else result["hashed_pw"] if result and "hashed_pw" in result else 0
- # Check the type of the result and access the is_admin value accordingly
- # is_admin = is_admin_result[0] if isinstance(is_admin_result, tuple) else is_admin_result["IsAdmin"] if is_admin_result else 0
-
- print(f"Stored hashed password: {stored_hashed_password}")
-
- try:
- # Use the Passlib context to verify the password against the stored hash
- is_valid = pwd_context.verify(password, stored_hashed_password)
- print(f"Password verification result: {is_valid}")
- return is_valid
- except Exception as e:
- print(f"Error verifying password: {e}")
- return False
\ No newline at end of file
diff --git a/database_functions/db_client.py b/database_functions/db_client.py
deleted file mode 100644
index de26eb47..00000000
--- a/database_functions/db_client.py
+++ /dev/null
@@ -1,141 +0,0 @@
-import os
-import logging
-import traceback
-from fastapi import HTTPException
-import psycopg
-from psycopg_pool import ConnectionPool
-from psycopg.rows import dict_row
-from mysql.connector import pooling
-
-# Set up logging
-logger = logging.getLogger(__name__)
-
-# Get database type from environment variable
-database_type = str(os.getenv('DB_TYPE', 'mariadb'))
-
-# Create a singleton for the connection pool
-class DatabaseConnectionPool:
- _instance = None
- _pool = None
-
- @classmethod
- def get_instance(cls):
- if cls._instance is None:
- cls._instance = DatabaseConnectionPool()
- return cls._instance
-
- def __init__(self):
- if self._pool is None:
- self._pool = self._create_pool()
-
- def _create_pool(self):
- """Create a new connection pool based on the database type"""
- db_host = os.environ.get("DB_HOST", "127.0.0.1")
- db_port = os.environ.get("DB_PORT", "3306")
- db_user = os.environ.get("DB_USER", "root")
- db_password = os.environ.get("DB_PASSWORD", "password")
- db_name = os.environ.get("DB_NAME", "pypods_database")
-
- print(f"Creating new database connection pool for {database_type}")
-
- if database_type == "postgresql":
- conninfo = f"host={db_host} port={db_port} user={db_user} password={db_password} dbname={db_name}"
- return ConnectionPool(conninfo=conninfo, min_size=1, max_size=32, open=True)
- else:
- # Add the autocommit and consume_results options to MySQL
- return pooling.MySQLConnectionPool(
- pool_name="pinepods_api_pool",
- pool_size=32,
- pool_reset_session=True,
- autocommit=True, # Add this to prevent transaction issues
- consume_results=True, # Add this to automatically consume unread results
- collation="utf8mb4_general_ci",
- host=db_host,
- port=db_port,
- user=db_user,
- password=db_password,
- database=db_name,
- )
-
- def get_connection(self):
- """Get a connection from the pool"""
- if database_type == "postgresql":
- return self._pool.getconn()
- else:
- return self._pool.get_connection()
-
- def return_connection(self, cnx):
- """Return a connection to the pool"""
- if database_type == "postgresql":
- self._pool.putconn(cnx) # PostgreSQL path unchanged
- else:
- # MySQL-specific cleanup
- try:
- # Clear any unread results before returning to pool
- if hasattr(cnx, 'unread_result') and cnx.unread_result:
- cursor = cnx.cursor()
- cursor.fetchall()
- cursor.close()
- except Exception as e:
- logger.warning(f"Failed to clean up MySQL connection: {str(e)}")
- finally:
- cnx.close()
-
-# Initialize the singleton pool
-pool = DatabaseConnectionPool.get_instance()
-
-def create_database_connection():
- """Create and return a new database connection"""
- try:
- return pool.get_connection()
- except Exception as e:
- print(f"Database connection error: {str(e)}")
- logger.error(f"Database connection error of type {type(e).__name__} with arguments: {e.args}")
- logger.error(traceback.format_exc())
- raise RuntimeError("Unable to connect to the database")
-
-def close_database_connection(cnx):
- """Close a database connection and handle both PostgreSQL and MySQL connections properly"""
- if cnx is None:
- return
-
- try:
- # First determine the connection type
- is_psql = hasattr(cnx, 'closed') # PostgreSQL has a 'closed' attribute
-
- if is_psql:
- # PostgreSQL connection - try to return to pool first
- try:
- if not cnx.closed and pool is not None:
- pool.return_connection(cnx)
- return
- except Exception as pool_err:
- print(f"Could not return connection to pool: {str(pool_err)}")
- # Fall back to direct close if return fails
- if not cnx.closed:
- cnx.close()
- else:
- # MySQL connection - just close directly, don't try to use the pool
- if hasattr(cnx, 'close'):
- cnx.close()
- except Exception as e:
- print(f"Error closing connection: {str(e)}")
- logger.error(f"Error closing connection: {str(e)}")
-
-# For FastAPI dependency injection
-def get_database_connection():
- """FastAPI dependency for getting a database connection"""
- try:
- cnx = create_database_connection()
- yield cnx
- except HTTPException:
- raise # Re-raise the HTTPException to let FastAPI handle it properly
- except Exception as e:
- logger.error(f"Database connection error of type {type(e).__name__} with arguments: {e.args}")
- logger.error(traceback.format_exc())
- raise HTTPException(500, "Unable to connect to the database")
- finally:
- try:
- close_database_connection(cnx)
- except Exception as e:
- logger.error(f"Error in connection cleanup: {str(e)}")
diff --git a/database_functions/functions.py b/database_functions/functions.py
deleted file mode 100644
index 9d5d22ab..00000000
--- a/database_functions/functions.py
+++ /dev/null
@@ -1,15213 +0,0 @@
-import random
-import string
-import mysql.connector
-from mysql.connector import errorcode
-import mysql.connector.pooling
-import sys
-import os
-import requests
-import feedgenerator
-import datetime
-from datetime import timedelta
-import time
-import appdirs
-import base64
-import subprocess
-import psycopg
-from psycopg.rows import dict_row
-from requests.exceptions import RequestException
-from fastapi import HTTPException
-from mysql.connector import ProgrammingError
-import feedparser
-import dateutil.parser
-import re
-import requests
-from requests.auth import HTTPBasicAuth
-from urllib.parse import urlparse, urlunparse
-from typing import List, Optional
-import pytz
-from yt_dlp import YoutubeDL
-from database_functions import youtube
-from database_functions import mp3_metadata
-import logging
-from cryptography.fernet import Fernet
-from requests.exceptions import RequestException
-import shutil
-import tempfile
-import secrets
-import html
-
-# # Get the application root directory from the environment variable
-# app_root = os.environ.get('APP_ROOT')
-sys.path.append('/pinepods/'),
-# Import the functions directly from app_functions.py located in the database_functions directory
-from database_functions.app_functions import sync_subscription_change, get_podcast_values, check_valid_feed, sync_subscription_change_gpodder
-
-
-def pascal_case(snake_str):
- return ''.join(word.title() for word in snake_str.split('_'))
-
-def lowercase_keys(data):
- if isinstance(data, dict):
- return {k.lower(): (bool(v) if k.lower() == 'completed' else v) for k, v in data.items()}
- elif isinstance(data, list):
- return [lowercase_keys(item) for item in data]
- return data
-
-def convert_bools(data, database_type):
- def convert_value(k, v):
- if k.lower() == 'explicit':
- if database_type == 'postgresql':
- return v == True
- else:
- return bool(v)
- return v
-
- if isinstance(data, dict):
- return {k: convert_value(k, v) for k, v in data.items()}
- elif isinstance(data, list):
- return [convert_bools(item, database_type) for item in data]
- return data
-
-def capitalize_keys(data):
- if isinstance(data, dict):
- return {pascal_case(k): v for k, v in data.items()}
- elif isinstance(data, list):
- return [capitalize_keys(item) for item in data]
- return data
-
-def normalize_keys(data, database_type):
- if database_type == "postgresql":
- # Convert keys to PascalCase
- return {pascal_case(k): v for k, v in data.items()}
- return data
-
-def get_value(result, key, default=None):
- """
- Helper function to extract value from result set.
- It handles both dictionaries and tuples.
- """
- key_lower = key.lower()
- if isinstance(result, dict):
- # Handles keys returned as lowercase in PostgreSQL
- return result.get(key_lower, default)
- elif isinstance(result, tuple):
- # Handles keys with tuple index mapping
- key_map = {
- "podcastid": 0,
- "episodeurl": 0,
- "podcastname": 0
- }
- index = key_map.get(key_lower)
- return result[index] if index is not None else default
- return default
-
-
-
-def get_web_key(cnx, database_type):
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT APIKey FROM "APIKeys" WHERE UserID = 1'
- else:
- query = "SELECT APIKey FROM APIKeys WHERE UserID = 1"
- cursor.execute(query)
- result = cursor.fetchone()
- cursor.close()
-
- if result:
- # Handle both tuple and dictionary return types
- if isinstance(result, dict):
- return result['apikey']
- else:
- return result[0]
- return None
-
-def add_custom_podcast(database_type, cnx, feed_url, user_id, username=None, password=None):
- # Proceed to extract and use podcast details if the feed is valid
- podcast_values = get_podcast_values(feed_url, user_id, username, password)
- try:
- feed_cutoff = 30
- result = add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff, username, password)
- if not result:
- raise Exception("Failed to add the podcast.")
-
- # Handle the tuple return value
- if isinstance(result, tuple):
- podcast_id = result[0] # Extract just the podcast_id
- else:
- podcast_id = result
-
- return podcast_id
-
- except Exception as e:
- raise HTTPException(status_code=500, detail=str(e))
-
-def add_news_feed_if_not_added(database_type, cnx):
- cursor = cnx.cursor()
- try:
- # Get all admin users
- if database_type == "postgresql":
- cursor.execute('SELECT UserID FROM "Users" WHERE IsAdmin = TRUE')
- else: # MySQL or MariaDB
- cursor.execute("SELECT UserID FROM Users WHERE IsAdmin = 1")
-
- admin_users = cursor.fetchall()
- feed_url = "https://news.pinepods.online/feed.xml"
-
- # Add feed for each admin user if they don't already have it
- for admin in admin_users:
- user_id = admin[0]
-
- # Check if this user already has the news feed
- if database_type == "postgresql":
- cursor.execute('SELECT PodcastID FROM "Podcasts" WHERE UserID = %s AND FeedURL = %s', (user_id, feed_url))
- else: # MySQL or MariaDB
- cursor.execute("SELECT PodcastID FROM Podcasts WHERE UserID = %s AND FeedURL = %s", (user_id, feed_url))
-
- existing_feed = cursor.fetchone()
-
- if existing_feed is None:
- add_custom_podcast(database_type, cnx, feed_url, user_id)
- cnx.commit()
-
- except (psycopg.ProgrammingError, mysql.connector.ProgrammingError) as e:
- print(f"Error in add_news_feed_if_not_added: {e}")
- cnx.rollback()
- finally:
- cursor.close()
-
-
-def add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff, username=None, password=None, podcast_index_id=0):
- cursor = cnx.cursor()
-
- # If podcast_index_id is 0, try to fetch it from the API
- if podcast_index_id == 0:
- api_url = os.environ.get("SEARCH_API_URL", "https://api.pinepods.online/api/search")
- search_url = f"{api_url}?query={podcast_values['pod_title']}"
-
- try:
- response = requests.get(search_url)
- response.raise_for_status()
- data = response.json()
-
- if data['status'] == 'true' and data['feeds']:
- for feed in data['feeds']:
- if feed['title'] == podcast_values['pod_title']:
- podcast_index_id = feed['id']
- break
-
- if podcast_index_id == 0:
- print(f"Couldn't find PodcastIndexID for {podcast_values['pod_title']}")
- except Exception as e:
- print(f"Error fetching PodcastIndexID: {e}")
-
-
- try:
- if database_type == "postgresql":
- query = 'SELECT PodcastID, PodcastName, FeedURL FROM "Podcasts" WHERE FeedURL = %s AND UserID = %s'
- else:
- query = "SELECT PodcastID, PodcastName, FeedURL FROM Podcasts WHERE FeedURL = %s AND UserID = %s"
-
- cursor.execute(query, (podcast_values['pod_feed_url'], user_id))
- result = cursor.fetchone()
- print(f"Existing podcast check - Query result: {result}")
- print(f"Checking for feed URL: {podcast_values['pod_feed_url']}")
-
- if result is not None:
- # Print more details for debugging - handle both dict and tuple
- if isinstance(result, dict):
- print(f"Matched podcast - ID: {result['podcastid']}, Name: {result['podcastname']}, URL: {result['feedurl']}")
- podcast_id = result['podcastid']
- elif isinstance(result, tuple):
- print(f"Matched podcast - ID: {result[0]}, Name: {result[1]}, URL: {result[2]}")
- podcast_id = result[0]
- else:
- print(f"Unexpected result type: {type(result)}")
- podcast_id = result # Fallback for scalar result
-
- # Add this check right before calling add_episodes in the "if result is not None:" block
- if database_type == "postgresql":
- episode_count_query = 'SELECT COUNT(*) FROM "Episodes" WHERE PodcastID = %s'
- reset_count_query = 'UPDATE "Podcasts" SET EpisodeCount = 0 WHERE PodcastID = %s'
- else: # MySQL or MariaDB
- episode_count_query = "SELECT COUNT(*) FROM Episodes WHERE PodcastID = %s"
- reset_count_query = "UPDATE Podcasts SET EpisodeCount = 0 WHERE PodcastID = %s"
-
- # Check if there are any episodes for this podcast
- cursor.execute(episode_count_query, (podcast_id,))
- episode_count_result = cursor.fetchone()
-
- # Handle both dict and tuple for episode count result
- if isinstance(episode_count_result, dict):
- episode_count = episode_count_result.get('count', episode_count_result.get('COUNT(*)', 0))
- elif isinstance(episode_count_result, tuple):
- episode_count = episode_count_result[0]
- else:
- episode_count = episode_count_result
-
- # If there are no episodes but the podcast has a non-zero count, reset it to 0
- if episode_count == 0:
- # Get the current episode count from Podcasts table
- if database_type == "postgresql":
- podcast_count_query = 'SELECT EpisodeCount FROM "Podcasts" WHERE PodcastID = %s'
- else:
- podcast_count_query = "SELECT EpisodeCount FROM Podcasts WHERE PodcastID = %s"
-
- cursor.execute(podcast_count_query, (podcast_id,))
- podcast_count_result = cursor.fetchone()
-
- # Handle both dict and tuple for podcast count result
- if isinstance(podcast_count_result, dict):
- podcast_count = podcast_count_result.get('episodecount', podcast_count_result.get('EpisodeCount', 0))
- elif isinstance(podcast_count_result, tuple):
- podcast_count = podcast_count_result[0]
- else:
- podcast_count = podcast_count_result
-
- # If the podcast has a non-zero count but no episodes, reset it
- if podcast_count > 0:
- print(f"Resetting episode count for podcast {podcast_id} from {podcast_count} to 0")
- cursor.execute(reset_count_query, (podcast_id,))
- cnx.commit()
-
- # Now proceed with add_episodes as normal
- first_episode_id = add_episodes(cnx, database_type, podcast_id, podcast_values['pod_feed_url'],
- podcast_values['pod_artwork'], False, username=username, password=password)
- print("Episodes added for existing podcast")
- # Return both IDs like we do for new podcasts
- return podcast_id, first_episode_id
-
- # Extract category names and convert to comma-separated string
- categories = podcast_values['categories']
- print(f"Categories: {categories}")
-
- if isinstance(categories, dict):
- category_list = ', '.join(categories.values())
- elif isinstance(categories, list):
- category_list = ', '.join(categories)
- elif isinstance(categories, str):
- category_list = categories
- else:
- category_list = ''
-
- if database_type == "postgresql":
- add_podcast_query = """
- INSERT INTO "Podcasts"
- (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, FeedCutoffDays, Username, Password, PodcastIndexID)
- VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) RETURNING PodcastID
- """
- explicit = podcast_values['pod_explicit']
- else: # MySQL or MariaDB
- add_podcast_query = """
- INSERT INTO Podcasts
- (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, FeedCutoffDays, Username, Password, PodcastIndexID)
- VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
- """
- explicit = 1 if podcast_values['pod_explicit'] else 0
-
-
- print("Inserting into db")
- print(podcast_values['pod_title'])
- print(podcast_values['pod_artwork'])
- print(podcast_values['pod_author'])
- print(category_list)
- print(podcast_values['pod_description'])
- print(podcast_values['pod_episode_count'])
- print(podcast_values['pod_feed_url'])
- print(podcast_values['pod_website'])
- print(explicit)
- print(user_id)
- try:
- cursor.execute(add_podcast_query, (
- podcast_values['pod_title'],
- podcast_values['pod_artwork'],
- podcast_values['pod_author'],
- category_list,
- podcast_values['pod_description'],
- 0,
- podcast_values['pod_feed_url'],
- podcast_values['pod_website'],
- explicit,
- user_id,
- feed_cutoff,
- username,
- password,
- podcast_index_id
- ))
-
- if database_type == "postgresql":
- podcast_id = cursor.fetchone()
- if isinstance(podcast_id, tuple):
- podcast_id = podcast_id[0]
- elif isinstance(podcast_id, dict):
- podcast_id = podcast_id['podcastid']
- else: # MySQL or MariaDB
- cnx.commit()
- podcast_id = cursor.lastrowid
-
- print('pre-id')
- if podcast_id is None:
- logging.error("No row was inserted.")
- print("No row was inserted.")
- cursor.close()
- return False
-
- print("Got id")
- print("Inserted into db")
-
- # Update UserStats table to increment PodcastsAdded count
- if database_type == "postgresql":
- query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s'
- else: # MySQL or MariaDB
- query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- cnx.commit()
- print("stats table updated")
-
- # Add episodes to database
- first_episode_id = add_episodes(cnx, database_type, podcast_id, podcast_values['pod_feed_url'],
- podcast_values['pod_artwork'], False, username=username, password=password, websocket=False)
- print("episodes added")
- return podcast_id, first_episode_id
-
- except Exception as e:
- logging.error(f"Failed to add podcast: {e}")
- print(f"Failed to add podcast: {e}")
- cnx.rollback()
- cursor.close()
- raise Exception(f"Failed to add podcast: {e}")
-
- except Exception as e:
- print(f"Error during podcast insertion or UserStats update: {e}")
- logging.error(f"Error during podcast insertion or UserStats update: {e}")
- cnx.rollback()
- raise
-
- finally:
- cursor.close()
-
- # Return True to indicate success
- return True
-
-
-def add_person_podcast(cnx, database_type, podcast_values, user_id, username=None, password=None, podcast_index_id=0):
- cursor = cnx.cursor()
-
- # If podcast_index_id is 0, try to fetch it from the API
- if podcast_index_id == 0:
- api_url = os.environ.get("SEARCH_API_URL", "https://api.pinepods.online/api/search")
- search_url = f"{api_url}?query={podcast_values['pod_title']}"
-
- try:
- response = requests.get(search_url)
- response.raise_for_status()
- data = response.json()
-
- if data['status'] == 'true' and data['feeds']:
- for feed in data['feeds']:
- if feed['title'] == podcast_values['pod_title']:
- podcast_index_id = feed['id']
- break
-
- if podcast_index_id == 0:
- print(f"Couldn't find PodcastIndexID for {podcast_values['pod_title']}")
- except Exception as e:
- print(f"Error fetching PodcastIndexID: {e}")
-
-
- try:
- # Check if the podcast already exists for the user
- if database_type == "postgresql":
- query = 'SELECT PodcastID FROM "Podcasts" WHERE FeedURL = %s AND UserID = %s'
- else: # MySQL or MariaDB
- query = "SELECT PodcastID FROM Podcasts WHERE FeedURL = %s AND UserID = %s"
-
- cursor.execute(query, (podcast_values['pod_feed_url'], user_id))
- result = cursor.fetchone()
- print(f"Result: {result}")
- print("Checked for existing podcast")
-
- if result is not None:
- # Podcast already exists for the user, return False
- cursor.close()
- return False
-
- # Extract category names and convert to comma-separated string
- categories = podcast_values['categories']
- print(f"Categories: {categories}")
-
- if isinstance(categories, dict):
- category_list = ', '.join(categories.values())
- elif isinstance(categories, list):
- category_list = ', '.join(categories)
- elif isinstance(categories, str):
- category_list = categories
- else:
- category_list = ''
-
- if database_type == "postgresql":
- add_podcast_query = """
- INSERT INTO "Podcasts"
- (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, Username, Password, PodcastIndexID)
- VALUES (%s, %s, %s, %s, %s, 0, %s, %s, %s, %s, %s, %s, %s) RETURNING PodcastID
- """
- explicit = podcast_values['pod_explicit']
- else: # MySQL or MariaDB
- add_podcast_query = """
- INSERT INTO Podcasts
- (PodcastName, ArtworkURL, Author, Categories, Description, EpisodeCount, FeedURL, WebsiteURL, Explicit, UserID, Username, Password, PodcastIndexID)
- VALUES (%s, %s, %s, %s, %s, 0, %s, %s, %s, %s, %s, %s, %s)
- """
- explicit = 1 if podcast_values['pod_explicit'] else 0
-
-
- print("Inserting into db")
- print(podcast_values['pod_title'])
- print(podcast_values['pod_artwork'])
- print(podcast_values['pod_author'])
- print(category_list)
- print(podcast_values['pod_description'])
- print(podcast_values['pod_episode_count'])
- print(podcast_values['pod_feed_url'])
- print(podcast_values['pod_website'])
- print(explicit)
- print(user_id)
- try:
- cursor.execute(add_podcast_query, (
- podcast_values['pod_title'],
- podcast_values['pod_artwork'],
- podcast_values['pod_author'],
- category_list,
- podcast_values['pod_description'],
- podcast_values['pod_feed_url'],
- podcast_values['pod_website'],
- explicit,
- user_id,
- username,
- password,
- podcast_index_id
- ))
-
- if database_type == "postgresql":
- podcast_id = cursor.fetchone()
- if isinstance(podcast_id, tuple):
- podcast_id = podcast_id[0]
- elif isinstance(podcast_id, dict):
- podcast_id = podcast_id['podcastid']
- else: # MySQL or MariaDB
- cnx.commit()
- podcast_id = cursor.lastrowid
-
- print('pre-id')
- if podcast_id is None:
- logging.error("No row was inserted.")
- print("No row was inserted.")
- cursor.close()
- return False
-
- except Exception as e:
- logging.error(f"Failed to add podcast: {e}")
- print(f"Failed to add podcast: {e}")
- cnx.rollback()
- cursor.close()
- return False
-
- except Exception as e:
- print(f"Error during podcast insertion or UserStats update: {e}")
- logging.error(f"Error during podcast insertion or UserStats update: {e}")
- cnx.rollback()
- raise
-
- finally:
- cursor.close()
-
- # Return True to indicate success
- return True
-
-
-def add_user(cnx, database_type, user_values):
- cursor = cnx.cursor()
- try:
- print(f"Adding user with values: {user_values}")
- if database_type == "postgresql":
- add_user_query = """
- INSERT INTO "Users"
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, false)
- RETURNING UserID
- """
- else: # MySQL or MariaDB
- add_user_query = """
- INSERT INTO Users
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, 0)
- """
-
- cursor.execute(add_user_query, user_values)
-
- # Handle the user ID retrieval
- if database_type == "postgresql":
- result = cursor.fetchone()
- if result is None:
- raise Exception("Failed to create user - no ID returned")
- # Print the result for debugging
- print(f"Raw PostgreSQL result: {result}")
- logging.debug(f"Raw PostgreSQL result: {result}")
- # Handle different return types
- if isinstance(result, dict):
- # Try different case variations
- user_id = result.get('userid') or result.get('UserID') or result.get('userId') or result.get('user_id')
- else:
- user_id = result[0]
- if not user_id:
- raise Exception("Failed to create user - invalid ID returned")
- else: # MySQL or MariaDB
- # Get the last inserted ID for MySQL
- user_id = cursor.lastrowid
- if not user_id:
- raise Exception("Failed to create user - no ID returned from MySQL")
- print(f"MySQL generated user_id: {user_id}")
-
- # Add user settings
- settings_query = """
- INSERT INTO "UserSettings"
- (UserID, Theme)
- VALUES (%s, %s)
- """ if database_type == "postgresql" else """
- INSERT INTO UserSettings
- (UserID, Theme)
- VALUES (%s, %s)
- """
- cursor.execute(settings_query, (user_id, 'Nordic'))
-
- # Add user stats
- stats_query = """
- INSERT INTO "UserStats"
- (UserID)
- VALUES (%s)
- """ if database_type == "postgresql" else """
- INSERT INTO UserStats
- (UserID)
- VALUES (%s)
- """
- cursor.execute(stats_query, (user_id,))
-
- cnx.commit()
- return user_id
- except Exception as e:
- cnx.rollback()
- logging.error(f"Error in add_user: {str(e)}")
- raise
- finally:
- cursor.close()
-
-def add_admin_user(cnx, database_type, user_values):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- add_user_query = """
- WITH inserted_user AS (
- INSERT INTO "Users"
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, TRUE)
- ON CONFLICT (Username) DO NOTHING
- RETURNING UserID
- )
- SELECT UserID FROM inserted_user
- UNION ALL
- SELECT UserID FROM "Users" WHERE Username = %s
- LIMIT 1
- """
- # Note: we add the username as an extra parameter here
- cursor.execute(add_user_query, user_values + (user_values[1],))
- user_id = cursor.fetchone()[0]
- else: # MySQL or MariaDB
- add_user_query = """
- INSERT INTO Users
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, 1)
- """
- cursor.execute(add_user_query, user_values)
- user_id = cursor.lastrowid
-
- # Now add settings and stats
- if database_type == "postgresql":
- add_user_settings_query = """
- INSERT INTO "UserSettings"
- (UserID, Theme)
- VALUES (%s, %s)
- """
- else:
- add_user_settings_query = """
- INSERT INTO UserSettings
- (UserID, Theme)
- VALUES (%s, %s)
- """
- cursor.execute(add_user_settings_query, (user_id, 'Nordic'))
-
- if database_type == "postgresql":
- add_user_stats_query = """
- INSERT INTO "UserStats"
- (UserID)
- VALUES (%s)
- """
- else:
- add_user_stats_query = """
- INSERT INTO UserStats
- (UserID)
- VALUES (%s)
- """
- cursor.execute(add_user_stats_query, (user_id,))
- cnx.commit()
- return user_id
- finally:
- cursor.close()
-
-def add_oidc_provider(cnx, database_type, provider_values):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- add_provider_query = """
- INSERT INTO "OIDCProviders"
- (ProviderName, ClientID, ClientSecret, AuthorizationURL,
- TokenURL, UserInfoURL, ButtonText, Scope,
- ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim,
- UsernameClaim, RolesClaim, UserRole, AdminRole)
- VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
- %s, %s, %s)
- RETURNING ProviderID
- """
- else: # MySQL
- add_provider_query = """
- INSERT INTO OIDCProviders
- (ProviderName, ClientID, ClientSecret, AuthorizationURL,
- TokenURL, UserInfoURL, ButtonText, Scope,
- ButtonColor, ButtonTextColor, IconSVG, NameClaim, EmailClaim,
- UsernameClaim, RolesClaim, UserRole, AdminRole)
- VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s,
- %s, %s, %s)
- """
- cursor.execute(add_provider_query, provider_values)
-
- if database_type == "postgresql":
- result = cursor.fetchone()
- if isinstance(result, dict):
- provider_id = result.get('providerid') or result.get('ProviderID') or result.get('provider_id')
- else:
- provider_id = result[0]
- else:
- provider_id = cursor.lastrowid
-
- cnx.commit()
- return provider_id
- except Exception as e:
- cnx.rollback()
- logging.error(f"Error in add_oidc_provider: {str(e)}")
- raise
- finally:
- cursor.close()
-
-def remove_oidc_provider(cnx, database_type, provider_id):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- delete_query = """
- DELETE FROM "OIDCProviders"
- WHERE ProviderID = %s
- """
- else:
- delete_query = """
- DELETE FROM OIDCProviders
- WHERE ProviderID = %s
- """
- cursor.execute(delete_query, (provider_id,))
- rows_affected = cursor.rowcount
- cnx.commit()
- return rows_affected > 0
- except Exception as e:
- cnx.rollback()
- logging.error(f"Error in remove_oidc_provider: {str(e)}")
- raise
- finally:
- cursor.close()
-
-def list_oidc_providers(cnx, database_type):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- list_query = """
- SELECT ProviderID, ProviderName, ClientID, AuthorizationURL,
- TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor,
- ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim,
- RolesClaim, UserRole, AdminRole, Enabled, Created, Modified
- FROM "OIDCProviders"
- ORDER BY ProviderName
- """
- else:
- list_query = """
- SELECT ProviderID, ProviderName, ClientID, AuthorizationURL,
- TokenURL, UserInfoURL, ButtonText, Scope, ButtonColor,
- ButtonTextColor, IconSVG, NameClaim, EmailClaim, UsernameClaim,
- RolesClaim, UserRole, AdminRole, Enabled, Created, Modified
- FROM OIDCProviders
- ORDER BY ProviderName
- """
- cursor.execute(list_query)
- if database_type == "postgresql":
- results = cursor.fetchall()
- providers = []
- for row in results:
- if isinstance(row, dict):
- # For dict results, normalize the keys
- normalized = {}
- for key, value in row.items():
- normalized_key = key.lower()
- if normalized_key == "providerid":
- normalized["provider_id"] = value
- elif normalized_key == "providername":
- normalized["provider_name"] = value
- elif normalized_key == "clientid":
- normalized["client_id"] = value
- elif normalized_key == "authorizationurl":
- normalized["authorization_url"] = value
- elif normalized_key == "tokenurl":
- normalized["token_url"] = value
- elif normalized_key == "userinfourl":
- normalized["user_info_url"] = value
- elif normalized_key == "buttontext":
- normalized["button_text"] = value
- elif normalized_key == "buttoncolor":
- normalized["button_color"] = value
- elif normalized_key == "buttontextcolor":
- normalized["button_text_color"] = value
- elif normalized_key == "iconsvg":
- normalized["icon_svg"] = value
- elif normalized_key == "nameclaim":
- normalized["name_claim"] = value
- elif normalized_key == "emailclaim":
- normalized["email_claim"] = value
- elif normalized_key == "usernameclaim":
- normalized["username_claim"] = value
- elif normalized_key == "rolesclaim":
- normalized["roles_claim"] = value
- elif normalized_key == "userrole":
- normalized["user_role"] = value
- elif normalized_key == "adminrole":
- normalized["admin_role"] = value
- else:
- normalized[normalized_key] = value
- providers.append(normalized)
- else:
- # For tuple results, use the existing mapping
- providers.append({
- 'provider_id': row[0],
- 'provider_name': row[1],
- 'client_id': row[2],
- 'authorization_url': row[3],
- 'token_url': row[4],
- 'user_info_url': row[5],
- 'button_text': row[6],
- 'scope': row[7],
- 'button_color': row[8],
- 'button_text_color': row[9],
- 'icon_svg': row[10],
- 'name_claim': row[11],
- 'email_claim': row[12],
- 'username_claim': row[13],
- 'roles_claim': row[14],
- 'user_role': row[15],
- 'admin_role': row[16],
- 'enabled': row[17],
- 'created': row[18],
- 'modified': row[19]
- })
- else:
- columns = [col[0] for col in cursor.description]
- results = [dict(zip(columns, row)) for row in cursor.fetchall()]
- # Normalize MySQL results the same way
- providers = []
- for row in results:
- normalized = {}
- for key, value in row.items():
- normalized_key = key.lower()
- if normalized_key == "providerid":
- normalized["provider_id"] = value
- elif normalized_key == "providername":
- normalized["provider_name"] = value
- elif normalized_key == "clientid":
- normalized["client_id"] = value
- elif normalized_key == "authorizationurl":
- normalized["authorization_url"] = value
- elif normalized_key == "tokenurl":
- normalized["token_url"] = value
- elif normalized_key == "userinfourl":
- normalized["user_info_url"] = value
- elif normalized_key == "buttontext":
- normalized["button_text"] = value
- elif normalized_key == "buttoncolor":
- normalized["button_color"] = value
- elif normalized_key == "buttontextcolor":
- normalized["button_text_color"] = value
- elif normalized_key == "iconsvg":
- normalized["icon_svg"] = value
- elif normalized_key == "nameclaim":
- normalized["name_claim"] = value
- elif normalized_key == "emailclaim":
- normalized["email_claim"] = value
- elif normalized_key == "usernameclaim":
- normalized["username_claim"] = value
- elif normalized_key == "rolesclaim":
- normalized["roles_claim"] = value
- elif normalized_key == "userrole":
- normalized["user_role"] = value
- elif normalized_key == "adminrole":
- normalized["admin_role"] = value
- elif normalized_key == "enabled":
- # Convert MySQL TINYINT to boolean
- normalized["enabled"] = bool(value)
- else:
- normalized[normalized_key] = value
- providers.append(normalized)
- return providers
- except Exception as e:
- logging.error(f"Error in list_oidc_providers: {str(e)}")
- raise
- finally:
- cursor.close()
-
-def get_public_oidc_providers(cnx, database_type):
- """Get minimal provider info needed for login buttons."""
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = '''
- SELECT
- ProviderID,
- ProviderName,
- ClientID,
- AuthorizationURL,
- Scope,
- ButtonColor,
- ButtonText,
- ButtonTextColor,
- IconSVG
- FROM "OIDCProviders"
- WHERE Enabled = TRUE
- '''
- else:
- query = '''
- SELECT
- ProviderID,
- ProviderName,
- ClientID,
- AuthorizationURL,
- Scope,
- ButtonColor,
- ButtonText,
- ButtonTextColor,
- IconSVG
- FROM OIDCProviders
- WHERE Enabled = TRUE
- '''
- cursor.execute(query)
- results = cursor.fetchall()
- providers = []
-
- for row in results:
- if isinstance(row, dict):
- # For dict results, normalize the keys
- normalized = {}
- for key, value in row.items():
- normalized_key = key.lower()
- if normalized_key == "providerid":
- normalized["provider_id"] = value
- elif normalized_key == "providername":
- normalized["provider_name"] = value
- elif normalized_key == "clientid":
- normalized["client_id"] = value
- elif normalized_key == "authorizationurl":
- normalized["authorization_url"] = value
- elif normalized_key == "buttoncolor":
- normalized["button_color"] = value
- elif normalized_key == "buttontext":
- normalized["button_text"] = value
- elif normalized_key == "buttontextcolor":
- normalized["button_text_color"] = value
- elif normalized_key == "iconsvg":
- normalized["icon_svg"] = value
- else:
- normalized[normalized_key] = value
- providers.append(normalized)
- else:
- # For tuple results, use index-based mapping
- providers.append({
- "provider_id": row[0],
- "provider_name": row[1],
- "client_id": row[2],
- "authorization_url": row[3],
- "scope": row[4],
- "button_color": row[5],
- "button_text": row[6],
- "button_text_color": row[7],
- "icon_svg": row[8]
- })
-
- return providers
- except Exception as e:
- logging.error(f"Error in get_public_oidc_providers: {str(e)}")
- raise
- finally:
- cursor.close()
-
-def get_pinepods_version():
- try:
- with open('/pinepods/current_version', 'r') as file:
- version = file.read().strip()
- if not version:
- return 'dev_mode'
- return version
- except FileNotFoundError:
- return "Version file not found."
- except Exception as e:
- return f"An error occurred: {e}"
-
-def get_first_episode_id(cnx, database_type, podcast_id, is_youtube=False):
- print('getting first ep id')
- cursor = cnx.cursor()
- try:
- if is_youtube:
- if database_type == "postgresql":
- query = 'SELECT VIDEOID FROM "YouTubeVideos" WHERE PODCASTID = %s ORDER BY PUBLISHEDAT ASC LIMIT 1'
- else: # MySQL or MariaDB
- query = "SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s ORDER BY PublishedAt ASC LIMIT 1"
- else:
- if database_type == "postgresql":
- query = 'SELECT EPISODEID FROM "Episodes" WHERE PODCASTID = %s ORDER BY EPISODEPUBDATE ASC LIMIT 1'
- else: # MySQL or MariaDB
- query = "SELECT EpisodeID FROM Episodes WHERE PodcastID = %s ORDER BY EpisodePubDate ASC LIMIT 1"
- print(f'request finish')
- cursor.execute(query, (podcast_id,))
- result = cursor.fetchone()
- print(f'request result {result}')
- if isinstance(result, dict):
- return result.get("videoid" if is_youtube else "episodeid") if result else None
- elif isinstance(result, tuple):
- return result[0] if result else None
- else:
- return None
- finally:
- cursor.close()
-
def try_fetch_feed(url, username=None, password=None):
    """Fetch a podcast feed over HTTP(S) and return its body as text.

    Tries the URL as given first; if that fails and the URL is HTTPS,
    retries once over plain HTTP.

    Args:
        url: Feed URL to fetch.
        username: Optional HTTP Basic Auth user (used only with password).
        password: Optional HTTP Basic Auth password.

    Returns:
        The decoded feed body as a str, or None when both attempts fail.
    """
    # Browser-like headers: some feed hosts reject obvious bot user agents.
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.9',
        # No 'Accept-Encoding' header: requests then negotiates and
        # transparently decompresses gzip/deflate responses itself.
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'Sec-Fetch-Dest': 'document',
        'Sec-Fetch-Mode': 'navigate',
        'Sec-Fetch-Site': 'none',
        'Sec-Fetch-User': '?1'
    }
    auth = HTTPBasicAuth(username, password) if username and password else None

    def _get_text(target_url):
        """Issue one GET and return decoded text; raises on HTTP errors."""
        response = requests.get(
            target_url,
            auth=auth,
            headers=headers,
            timeout=30,
            allow_redirects=True
        )
        response.raise_for_status()

        # Warn (but do not fail) when the server claims a non-text payload.
        content_type = response.headers.get('Content-Type', '')
        if 'xml' not in content_type.lower() and 'text' not in content_type.lower():
            print(f"Warning: Unexpected content type: {content_type}")

        # Prefer requests' charset detection; fall back to UTF-8 with
        # replacement characters rather than crashing on bad bytes.
        try:
            return response.text
        except UnicodeDecodeError:
            return response.content.decode('utf-8', errors='replace')

    # Try the original URL first
    try:
        return _get_text(url)
    except RequestException as e:
        print(f"Error fetching {url}: {str(e)}")

    # If original URL failed and starts with https, try with http
    if url.startswith('https://'):
        http_url = 'http://' + url[8:]  # Replace https:// with http://
        print(f"Trying HTTP fallback: {http_url}")
        try:
            # BUGFIX: this path previously returned response.content (bytes)
            # while the primary path returned str; callers now always get str.
            return _get_text(http_url)
        except RequestException as e2:
            print(f"Error fetching HTTP fallback {http_url}: {str(e2)}")

    # Original URL and HTTP fallback both failed
    return None
-
def parse_duration(duration_string: str) -> int:
    """Convert a feed duration string to a number of seconds.

    Accepts plain seconds ("90"), MM:SS ("1:30") and HH:MM:SS ("01:02:03").
    Strings with more than three colon-separated parts use the first three,
    matching the duration handling in add_episodes. Malformed input returns
    0 instead of raising.

    Args:
        duration_string: Raw duration text from the feed.

    Returns:
        Duration in whole seconds, or 0 when the string cannot be parsed.
    """
    # First, check if duration is in seconds (no colons)
    if ':' not in duration_string:
        try:
            # Directly return seconds if no colon is found
            return int(duration_string)
        except ValueError:
            print(f'Error parsing duration from pure seconds: {duration_string}')
            return 0  # Return 0 or some default value in case of error

    # Handle HH:MM:SS / MM:SS format
    try:
        parts = list(map(int, duration_string.split(':')))
    except ValueError:
        # BUGFIX: non-numeric segments (e.g. "1:30.5") previously raised an
        # uncaught ValueError; fall back to 0 like the pure-seconds path.
        print(f'Error parsing duration: {duration_string}')
        return 0

    while len(parts) < 3:
        parts.insert(0, 0)  # Prepend zeros so we always have HH:MM:SS

    # BUGFIX: more than three parts previously crashed the tuple unpacking;
    # use the first three, consistent with add_episodes.
    h, m, s = parts[0], parts[1], parts[2]
    return h * 3600 + m * 60 + s
-
-def update_episode_count(cnx, database_type, podcast_id):
- """Recalculate and update episode count for a podcast"""
- cursor = cnx.cursor()
- print(f'Updating episode count for podcast {podcast_id}')
- try:
- # Count both regular episodes and YouTube videos
- if database_type == "postgresql":
- episode_count_query = 'SELECT COUNT(*) FROM "Episodes" WHERE PodcastID = %s'
- video_count_query = 'SELECT COUNT(*) FROM "YouTubeVideos" WHERE PodcastID = %s'
- update_query = 'UPDATE "Podcasts" SET EpisodeCount = %s WHERE PodcastID = %s'
- verify_query = 'SELECT EpisodeCount FROM "Podcasts" WHERE PodcastID = %s'
- else: # MySQL or MariaDB
- episode_count_query = 'SELECT COUNT(*) FROM Episodes WHERE PodcastID = %s'
- video_count_query = 'SELECT COUNT(*) FROM YouTubeVideos WHERE PodcastID = %s'
- update_query = "UPDATE Podcasts SET EpisodeCount = %s WHERE PodcastID = %s"
- verify_query = 'SELECT EpisodeCount FROM Podcasts WHERE PodcastID = %s'
-
- # Get episode count
- cursor.execute(episode_count_query, (podcast_id,))
- episode_result = cursor.fetchone()
- episode_count = 0
- if episode_result:
- if isinstance(episode_result, tuple):
- episode_count = episode_result[0]
- elif isinstance(episode_result, dict):
- episode_count = episode_result["count"]
-
- # Get video count
- cursor.execute(video_count_query, (podcast_id,))
- video_result = cursor.fetchone()
- video_count = 0
- if video_result:
- if isinstance(video_result, tuple):
- video_count = video_result[0]
- elif isinstance(video_result, dict):
- video_count = video_result["count"]
-
- # Total count
- total_count = episode_count + video_count
- # Update total count
- cursor.execute(update_query, (total_count, podcast_id))
- # Verify the update
- cursor.execute(verify_query, (podcast_id,))
- verify_result = cursor.fetchone()
- final_count = 0
- if verify_result:
- if isinstance(verify_result, tuple):
- final_count = verify_result[0]
- elif isinstance(verify_result, dict):
- final_count = verify_result["episodecount"]
-
- cnx.commit()
-
- except Exception as e:
- print(f'Error updating content count for podcast {podcast_id}: {str(e)}')
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
def add_episodes(cnx, database_type, podcast_id, feed_url, artwork_url, auto_download, username=None, password=None, websocket=False):
    """Fetch a podcast feed and insert any episodes not already stored.

    Downloads the feed (with a www/non-www retry), parses it with
    feedparser, inserts each previously-unseen episode into the Episodes
    table, refreshes the podcast's episode count, sends a notification per
    new episode, and optionally auto-downloads each one.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Podcast the episodes belong to.
        feed_url: RSS/Atom feed URL.
        artwork_url: Podcast-level artwork used as an episode fallback.
        auto_download: When True, download each newly added episode.
        username: Optional HTTP Basic Auth user for the feed.
        password: Optional HTTP Basic Auth password for the feed.
        websocket: When True, return the list of new-episode dicts instead
            of the first episode id.

    Returns:
        A list of new-episode dicts when websocket is True; otherwise the
        podcast's first (oldest) episode id, or None.

    Raises:
        ValueError: When the feed cannot be fetched from either URL form.
    """
    import feedparser
    first_episode_id = None

    # Try to fetch the feed
    content = try_fetch_feed(feed_url, username, password)

    if content is None:
        # If the original URL fails, try switching between www and non-www
        parsed_url = urlparse(feed_url)
        if parsed_url.netloc.startswith('www.'):
            alternate_netloc = parsed_url.netloc[4:]
        else:
            alternate_netloc = 'www.' + parsed_url.netloc

        alternate_url = urlunparse(parsed_url._replace(netloc=alternate_netloc))
        content = try_fetch_feed(alternate_url, username, password)

        if content is None:
            raise ValueError(f"Failed to fetch feed from both {feed_url} and its www/non-www alternative")

    episode_dump = feedparser.parse(content)

    cursor = cnx.cursor()

    new_episodes = []

    for entry in episode_dump.entries:
        # Check necessary fields are present
        # NOTE(review): requiring "enclosures" here contradicts the
        # "audio URL can be empty" handling below — entries without an
        # enclosure are skipped before reaching it; confirm intent.
        if not all(hasattr(entry, attr) for attr in ["title", "summary", "enclosures"]):
            continue

        # Title is required - if missing, skip this episode
        if not hasattr(entry, 'title') or not entry.title:
            continue

        parsed_title = entry.title

        # Description - use placeholder if missing
        parsed_description = entry.get('content', [{}])[0].get('value') or entry.get('summary') or "No description available"

        # Audio URL can be empty (non-audio posts are allowed)
        parsed_audio_url = entry.enclosures[0].href if entry.enclosures else ""

        # Release date - use current time as fallback if parsing fails
        try:
            parsed_release_datetime = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d %H:%M:%S")
        except (AttributeError, ValueError):
            parsed_release_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

        # Artwork - fall back from episode art to podcast art to a default
        parsed_artwork_url = (entry.get('itunes_image', {}).get('href') or
                              getattr(entry, 'image', {}).get('href') or
                              artwork_url or  # This is the podcast's default artwork
                              '/static/assets/default-episode.png')  # Final fallback artwork

        # Duration parsing
        def estimate_duration_from_file_size(file_size_bytes, bitrate_kbps=128):
            """
            Estimate duration in seconds based on file size and bitrate.

            Args:
                file_size_bytes (int): Size of the media file in bytes
                bitrate_kbps (int): Bitrate in kilobits per second (default: 128)

            Returns:
                int: Estimated duration in seconds
            """
            bytes_per_second = (bitrate_kbps * 1000) / 8  # Convert kbps to bytes per second
            return int(file_size_bytes / bytes_per_second)

        # Duration parsing section for the add_episodes function
        parsed_duration = 0
        duration_str = getattr(entry, 'itunes_duration', '')
        if ':' in duration_str:
            # If duration contains ":", then process as HH:MM:SS or MM:SS
            time_parts = list(map(int, duration_str.split(':')))
            while len(time_parts) < 3:
                time_parts.insert(0, 0)  # Pad missing values with zeros

            # Fix for handling more than 3 time parts
            if len(time_parts) > 3:
                print(f"Warning: Duration string '{duration_str}' has more than 3 parts, using first 3")
                h, m, s = time_parts[0], time_parts[1], time_parts[2]
            else:
                h, m, s = time_parts

            parsed_duration = h * 3600 + m * 60 + s
        elif duration_str.isdigit():
            # If duration is all digits (no ":"), treat as seconds directly
            parsed_duration = int(duration_str)
        elif hasattr(entry, 'itunes_duration_seconds'):
            # Additional format as fallback, if explicitly provided as seconds
            parsed_duration = int(entry.itunes_duration_seconds)
        elif hasattr(entry, 'duration'):
            # Other specified duration formats (assume they are in correct format or seconds)
            parsed_duration = parse_duration(entry.duration)
        elif hasattr(entry, 'length'):
            # If duration not specified but length is, use length (assuming it's in seconds)
            parsed_duration = int(entry.length)
        # Check for enclosure length attribute as a last resort
        elif entry.enclosures and len(entry.enclosures) > 0:
            enclosure = entry.enclosures[0]
            if hasattr(enclosure, 'length') and enclosure.length:
                try:
                    file_size = int(enclosure.length)
                    # Only estimate if the size seems reasonable (to avoid errors)
                    if file_size > 1000000:  # Only consider files larger than 1MB
                        parsed_duration = estimate_duration_from_file_size(file_size)
                    # print(f"Estimated duration from file size {file_size} bytes: {parsed_duration} seconds")
                except (ValueError, TypeError) as e:
                    print(f"Error parsing enclosure length: {e}")

        # Check for existing episode (matched on podcast + title only)
        if database_type == "postgresql":
            episode_check_query = 'SELECT * FROM "Episodes" WHERE PodcastID = %s AND EpisodeTitle = %s'
        else:  # MySQL or MariaDB
            episode_check_query = "SELECT * FROM Episodes WHERE PodcastID = %s AND EpisodeTitle = %s"

        cursor.execute(episode_check_query, (podcast_id, parsed_title))
        if cursor.fetchone():
            continue  # Episode already exists
        print("inserting now")
        # Insert the new episode
        if database_type == "postgresql":
            episode_insert_query = """
                INSERT INTO "Episodes"
                (PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                VALUES (%s, %s, %s, %s, %s, %s, %s)
            """
        else:  # MySQL or MariaDB
            episode_insert_query = """
                INSERT INTO Episodes
                (PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                VALUES (%s, %s, %s, %s, %s, %s, %s)
            """

        cursor.execute(episode_insert_query, (podcast_id, parsed_title, parsed_description, parsed_audio_url, parsed_artwork_url, parsed_release_datetime, parsed_duration))
        print('episodes inserted')
        # NOTE(review): this recounts (and commits) once per inserted
        # episode — potentially expensive for large feeds.
        update_episode_count(cnx, database_type, podcast_id)
        # Get the EpisodeID for the newly added episode
        if cursor.rowcount > 0:
            print(f"Added episode '{parsed_title}'")
            check_and_send_notification(cnx, database_type, podcast_id, parsed_title)
            if websocket:
                # Get the episode ID using a SELECT query right after insert
                if database_type == "postgresql":
                    cursor.execute("""
                        SELECT EpisodeID FROM "Episodes"
                        WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeURL = %s
                    """, (podcast_id, parsed_title, parsed_audio_url))
                else:
                    cursor.execute("""
                        SELECT EpisodeID FROM Episodes
                        WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeURL = %s
                    """, (podcast_id, parsed_title, parsed_audio_url))

                episode_id = cursor.fetchone()
                # Normalize tuple/dict cursor rows to a plain id.
                if isinstance(episode_id, dict):
                    episode_id = episode_id.get('episodeid')
                elif isinstance(episode_id, tuple):
                    episode_id = episode_id[0]

                episode_data = {
                    "episode_id": episode_id,
                    "podcast_id": podcast_id,
                    "title": parsed_title,
                    "description": parsed_description,
                    "audio_url": parsed_audio_url,
                    "artwork_url": parsed_artwork_url,
                    "release_datetime": parsed_release_datetime,
                    "duration": parsed_duration,
                    "completed": False  # Assuming this is the default for new episodes
                }
                new_episodes.append(episode_data)
            if auto_download:  # Check if auto-download is enabled
                episode_id = get_episode_id(cnx, database_type, podcast_id, parsed_title, parsed_audio_url)

                user_id = get_user_id_from_pod_id(cnx, database_type, podcast_id)
                # Call your download function here
                download_podcast(cnx, database_type, episode_id, user_id)

    cnx.commit()

    # Now, retrieve the first episode ID
    if not websocket and first_episode_id is None:
        print(f'getting first id pre')
        first_episode_id = get_first_episode_id(cnx, database_type, podcast_id)
        print(f'first result {first_episode_id}')
    if websocket:
        return new_episodes
    return first_episode_id
-
-
-
-
def check_existing_channel_subscription(cnx, database_type: str, channel_id: str, user_id: int) -> Optional[int]:
    """Check if user is already subscribed to this channel.

    Looks the channel up by its canonical YouTube URL for this user.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        channel_id: YouTube channel id (the part after /channel/).
        user_id: Owning user's id.

    Returns:
        The existing PodcastID, or None when no subscription exists.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT PodcastID FROM "Podcasts"
                WHERE WebsiteURL = %s AND UserID = %s
            """
        else:  # MariaDB
            query = """
                SELECT PodcastID FROM Podcasts
                WHERE WebsiteURL = %s AND UserID = %s
            """

        cursor.execute(query, (f"https://www.youtube.com/channel/{channel_id}", user_id))
        result = cursor.fetchone()
        if result is None:
            return None
        # BUGFIX: rows may be tuples or dicts depending on cursor factory;
        # sibling helpers in this module handle both, so do the same here.
        if isinstance(result, dict):
            return result.get('podcastid')
        return result[0]
    except Exception as e:
        raise e
    finally:
        # BUGFIX: the cursor was previously leaked on every call.
        cursor.close()
-
def add_youtube_channel(cnx, database_type: str, channel_info: dict, user_id: int, feed_cutoff: int) -> int:
    """Add YouTube channel to Podcasts table.

    Inserts one row flagged IsYouTubeChannel and returns its PodcastID
    (via RETURNING on PostgreSQL, lastrowid on MariaDB). Commits on
    success; rolls back and re-raises on failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        channel_info: Dict with 'name', 'channel_id', 'thumbnail_url' and
            'description' keys.
        user_id: Owning user's id.
        feed_cutoff: Retention window in days (FeedCutoffDays column).

    Returns:
        The newly inserted row's PodcastID.

    Raises:
        ValueError: When PostgreSQL returns no row / no id from the insert.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                INSERT INTO "Podcasts" (
                    PodcastName, FeedURL, ArtworkURL, Author, Description,
                    WebsiteURL, UserID, IsYouTubeChannel, Categories, FeedCutoffDays
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, TRUE, %s, %s)
                RETURNING PodcastID
            """
        else:  # MariaDB
            query = """
                INSERT INTO Podcasts (
                    PodcastName, FeedURL, ArtworkURL, Author, Description,
                    WebsiteURL, UserID, IsYouTubeChannel, Categories, FeedCutoffDays
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, 1, %s, %s)
            """

        # Channel URL doubles as both FeedURL and WebsiteURL; the channel
        # name doubles as Author. Categories is stored as an empty string.
        values = (
            channel_info['name'],
            f"https://www.youtube.com/channel/{channel_info['channel_id']}",
            channel_info['thumbnail_url'],
            channel_info['name'],
            channel_info['description'],
            f"https://www.youtube.com/channel/{channel_info['channel_id']}",
            user_id,
            "",
            feed_cutoff
        )

        cursor.execute(query, values)
        if database_type == "postgresql":
            result = cursor.fetchone()
            if result is None:
                raise ValueError("No result returned from insert")
            # Handle both tuple and dict return types
            if isinstance(result, dict):
                podcast_id = result.get('podcastid')
                if podcast_id is None:
                    raise ValueError("No podcast ID in result dict")
            else:  # it's a tuple
                podcast_id = result[0]
            cnx.commit()
        else:  # MariaDB
            podcast_id = cursor.lastrowid
            cnx.commit()
        return podcast_id
    except Exception as e:
        print(f"Error in add_youtube_channel: {str(e)}")
        cnx.rollback()
        raise e
    finally:
        # BUGFIX: close the cursor on every exit path (was leaked before).
        cursor.close()
-
def add_youtube_videos(cnx, database_type: str, podcast_id: int, videos: list):
    """Add YouTube videos to YouTubeVideos table.

    Inserts one row per entry in `videos` and refreshes the podcast's
    episode count after each insert. Commits once at the end; rolls back
    and re-raises on any failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Channel (Podcasts row) the videos belong to.
        videos: Dicts with 'title', 'description', 'url', 'thumbnail',
            'publish_date', 'duration' and 'id' keys.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                INSERT INTO "YouTubeVideos" (
                    PodcastID, VideoTitle, VideoDescription,
                    VideoURL, ThumbnailURL, PublishedAt,
                    Duration, YouTubeVideoID
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            """
        else:  # MariaDB
            query = """
                INSERT INTO YouTubeVideos (
                    PodcastID, VideoTitle, VideoDescription,
                    VideoURL, ThumbnailURL, PublishedAt,
                    Duration, YouTubeVideoID
                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
            """

        for video in videos:
            cursor.execute(query, (
                podcast_id,
                video['title'],
                video['description'],
                video['url'],
                video['thumbnail'],
                video['publish_date'],
                video['duration'],
                video['id']
            ))
            # NOTE(review): recounting after every insert (each recount also
            # commits) costs O(n) extra queries; kept to preserve behavior.
            update_episode_count(cnx, database_type, podcast_id)

        cnx.commit()
    except Exception as e:
        cnx.rollback()
        raise e
    finally:
        # BUGFIX: close the cursor on every exit path (was leaked before).
        cursor.close()
-
def cleanup_old_youtube_videos(cnx, database_type):
    """Periodically cleanup old YouTube videos for all channels"""
    import logging

    logger = logging.getLogger(__name__)
    cursor = cnx.cursor()

    try:
        # Fetch every YouTube channel with a positive retention window.
        if database_type == "postgresql":
            channel_query = """
                SELECT PodcastID, FeedCutoffDays
                FROM "Podcasts"
                WHERE IsYouTubeChannel = TRUE AND FeedCutoffDays > 0
            """
        else:
            channel_query = """
                SELECT PodcastID, FeedCutoffDays
                FROM Podcasts
                WHERE IsYouTubeChannel = TRUE AND FeedCutoffDays > 0
            """

        cursor.execute(channel_query)

        for row in cursor.fetchall():
            # Rows may be tuples or dicts; dict key casing differs between
            # PostgreSQL and MySQL/MariaDB cursor factories.
            if isinstance(row, tuple):
                podcast_id, feed_cutoff = row[0], row[1]
            elif database_type == "postgresql":
                podcast_id, feed_cutoff = row["podcastid"], row["feedcutoffdays"]
            else:
                podcast_id, feed_cutoff = row["PodcastID"], row["FeedCutoffDays"]

            # NOTE(review): cutoff is timezone-aware (UTC); assumes the
            # PublishedAt comparison downstream tolerates that.
            cutoff_date = datetime.datetime.now(datetime.timezone.utc) - timedelta(days=feed_cutoff)
            logger.info(f"Cleaning up channel {podcast_id} with cutoff {feed_cutoff} days")

            remove_old_youtube_videos(cnx, database_type, podcast_id, cutoff_date)

    except Exception as e:
        logger.error(f"Error during YouTube cleanup: {str(e)}")
        raise e
    finally:
        cursor.close()
-
def remove_old_youtube_videos(cnx, database_type: str, podcast_id: int, cutoff_date: datetime.datetime):
    """Remove YouTube videos older than cutoff date and their associated files.

    Deletes each old video's downloaded MP3 from disk (best effort), purges
    all database references (playlists, history, downloads, saved items,
    queue), removes the YouTubeVideos rows themselves, then refreshes the
    podcast's episode count. Commits on success; rolls back and re-raises
    on failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Channel whose videos are pruned.
        cutoff_date: Videos with PublishedAt earlier than this are removed.
    """
    import os
    import logging

    logger = logging.getLogger(__name__)
    cursor = cnx.cursor()

    try:
        # First, get all videos older than cutoff date
        if database_type == "postgresql":
            query = """
                SELECT VideoID, YouTubeVideoID, VideoURL
                FROM "YouTubeVideos"
                WHERE PodcastID = %s AND PublishedAt < %s
            """
        else:
            query = """
                SELECT VideoID, YouTubeVideoID, VideoURL
                FROM YouTubeVideos
                WHERE PodcastID = %s AND PublishedAt < %s
            """

        cursor.execute(query, (podcast_id, cutoff_date))
        old_videos = cursor.fetchall()

        if not old_videos:
            logger.info(f"No videos to remove for podcast {podcast_id}")
            return

        # Extract the VideoID list for database cleanup
        video_ids = []
        youtube_video_ids = []

        for video in old_videos:
            # Rows may be tuples or dicts; dict key casing differs between
            # PostgreSQL and MySQL/MariaDB cursor factories.
            if isinstance(video, tuple):
                video_id, youtube_video_id, _ = video
            else:
                if database_type == "postgresql":
                    video_id = video["videoid"]
                    youtube_video_id = video["youtubevideoid"]
                else:
                    video_id = video["VideoID"]
                    youtube_video_id = video["YouTubeVideoID"]

            video_ids.append(video_id)
            youtube_video_ids.append(youtube_video_id)

            # Delete the MP3 file (best effort; DB cleanup proceeds even if
            # the file is missing or removal fails)
            file_paths = [
                f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3",
                f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3.mp3"  # In case of double extension
            ]

            for file_path in file_paths:
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        logger.info(f"Deleted file: {file_path}")
                    except Exception as e:
                        logger.error(f"Failed to delete file {file_path}: {str(e)}")

        # Now clean up all references to these videos in other tables
        if video_ids:  # Only proceed if we have videos to delete
            # Create placeholders for the IN clause. The f-strings below only
            # interpolate "%s" placeholders, never user data, so they remain
            # parameterized queries.
            placeholders = ','.join(['%s'] * len(video_ids))

            if database_type == "postgresql":
                # Delete from all related YouTube video tables
                delete_playlist_contents = f'DELETE FROM "PlaylistContents" WHERE VideoID IN ({placeholders})'
                delete_history = f'DELETE FROM "UserVideoHistory" WHERE VideoID IN ({placeholders})'
                delete_downloaded = f'DELETE FROM "DownloadedVideos" WHERE VideoID IN ({placeholders})'
                delete_saved = f'DELETE FROM "SavedVideos" WHERE VideoID IN ({placeholders})'
                delete_queue = f'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN ({placeholders}) AND is_youtube = true'
                delete_videos = f'DELETE FROM "YouTubeVideos" WHERE VideoID IN ({placeholders})'
            else:
                # Delete from all related YouTube video tables
                delete_playlist_contents = f'DELETE FROM PlaylistContents WHERE VideoID IN ({placeholders})'
                delete_history = f'DELETE FROM UserVideoHistory WHERE VideoID IN ({placeholders})'
                delete_downloaded = f'DELETE FROM DownloadedVideos WHERE VideoID IN ({placeholders})'
                delete_saved = f'DELETE FROM SavedVideos WHERE VideoID IN ({placeholders})'
                delete_queue = f'DELETE FROM EpisodeQueue WHERE EpisodeID IN ({placeholders}) AND is_youtube = 1'
                delete_videos = f'DELETE FROM YouTubeVideos WHERE VideoID IN ({placeholders})'

            # Execute all deletion statements; referencing tables first,
            # YouTubeVideos itself last.
            cursor.execute(delete_playlist_contents, video_ids)
            logger.info(f"Deleted playlist content references for {cursor.rowcount} videos")

            cursor.execute(delete_history, video_ids)
            logger.info(f"Deleted history entries for {cursor.rowcount} videos")

            cursor.execute(delete_downloaded, video_ids)
            logger.info(f"Deleted downloaded entries for {cursor.rowcount} videos")

            cursor.execute(delete_saved, video_ids)
            logger.info(f"Deleted saved entries for {cursor.rowcount} videos")

            cursor.execute(delete_queue, video_ids)
            logger.info(f"Deleted queue entries for {cursor.rowcount} videos")

            cursor.execute(delete_videos, video_ids)
            logger.info(f"Deleted {cursor.rowcount} videos from YouTubeVideos table")

            # Update episode count
            update_episode_count(cnx, database_type, podcast_id)

        cnx.commit()
        logger.info(f"Successfully removed {len(video_ids)} old videos and all references for podcast {podcast_id}")

    except Exception as e:
        cnx.rollback()
        logger.error(f"Error removing old YouTube videos for podcast {podcast_id}: {str(e)}")
        raise e
    finally:
        cursor.close()
-
def add_people_episodes(cnx, database_type, person_id: int, podcast_id: int, feed_url: str):
    """Sync one podcast feed's episodes for a person into PeopleEpisodes.

    Parses the feed, inserts episodes not yet stored for this
    (person, podcast) pair, and prunes stored episodes that are no longer
    in the feed and are older than 30 days. Commits on success; rolls back
    and re-raises on failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        person_id: Person the episodes are associated with.
        podcast_id: Podcast the feed belongs to.
        feed_url: RSS/Atom feed URL to parse.
    """
    import feedparser
    import dateutil.parser
    # BUGFIX: initialize before the try so the except/finally blocks do not
    # hit an unbound local if an exception fires before cnx.cursor().
    cursor = None
    try:
        # Browser-like headers: some hosts reject obvious bot user agents.
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3',
            'Accept-Language': 'en-US,en;q=0.9'
        }
        content = feedparser.parse(feed_url, request_headers=headers)
        cursor = cnx.cursor()

        # Start a transaction
        if database_type == "postgresql":
            cursor.execute("BEGIN")

        # Get existing episode IDs before processing
        if database_type == "postgresql":
            existing_query = """
                SELECT EpisodeID FROM "PeopleEpisodes"
                WHERE PersonID = %s::integer
                AND PodcastID = %s::integer
            """
        else:
            existing_query = """
                SELECT EpisodeID FROM PeopleEpisodes
                WHERE PersonID = %s
                AND PodcastID = %s
            """

        cursor.execute(existing_query, (person_id, podcast_id))
        # NOTE(review): assumes tuple rows here; a dict-row cursor factory
        # would break row[0] — confirm the cursor configuration.
        existing_episodes = {row[0] for row in cursor.fetchall()}
        processed_episodes = set()

        for entry in content.entries:
            if not all(hasattr(entry, attr) for attr in ["title", "summary"]):
                continue

            # Extract episode information using more robust parsing
            parsed_title = entry.title
            parsed_description = entry.get('content', [{}])[0].get('value', entry.summary)

            # Get audio URL from enclosures; episodes without audio are skipped
            parsed_audio_url = ""
            for enclosure in entry.get('enclosures', []):
                if enclosure.get('type', '').startswith('audio/'):
                    parsed_audio_url = enclosure.get('href', '')
                    break

            if not parsed_audio_url:
                continue

            # Parse publish date, falling back to "now" when absent/invalid
            try:
                parsed_release_datetime = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d %H:%M:%S")
            except (AttributeError, ValueError):
                # BUGFIX: was datetime.now(), which does not exist on the
                # datetime module (cf. the identical fallback in add_episodes).
                parsed_release_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

            # Get artwork URL with fallbacks (may be None)
            parsed_artwork_url = (entry.get('itunes_image', {}).get('href') or
                                  getattr(entry, 'image', {}).get('href'))

            # Duration parsing with multiple fallbacks
            parsed_duration = 0
            duration_str = getattr(entry, 'itunes_duration', '')
            if ':' in duration_str:
                time_parts = list(map(int, duration_str.split(':')))
                while len(time_parts) < 3:
                    time_parts.insert(0, 0)
                # BUGFIX: more than three parts crashed the unpack; take the
                # first three, matching add_episodes' handling.
                if len(time_parts) > 3:
                    h, m, s = time_parts[0], time_parts[1], time_parts[2]
                else:
                    h, m, s = time_parts
                parsed_duration = h * 3600 + m * 60 + s
            elif duration_str.isdigit():
                parsed_duration = int(duration_str)
            elif hasattr(entry, 'itunes_duration_seconds'):
                parsed_duration = int(entry.itunes_duration_seconds)
            elif hasattr(entry, 'duration'):
                parsed_duration = parse_duration(entry.duration)
            elif hasattr(entry, 'length'):
                parsed_duration = int(entry.length)

            try:
                # Check for existing episode (matched on the audio URL)
                if database_type == "postgresql":
                    episode_check_query = """
                        SELECT EpisodeID FROM "PeopleEpisodes"
                        WHERE PersonID = %s::integer
                        AND PodcastID = %s::integer
                        AND EpisodeURL = %s
                    """
                else:
                    episode_check_query = """
                        SELECT EpisodeID FROM PeopleEpisodes
                        WHERE PersonID = %s
                        AND PodcastID = %s
                        AND EpisodeURL = %s
                    """

                cursor.execute(episode_check_query, (person_id, podcast_id, parsed_audio_url))
                episode_result = cursor.fetchone()

                if episode_result:
                    episode_id = episode_result[0]
                    processed_episodes.add(episode_id)
                    continue

                # Insert new episode
                if database_type == "postgresql":
                    insert_query = """
                        INSERT INTO "PeopleEpisodes"
                        (PersonID, PodcastID, EpisodeTitle, EpisodeDescription,
                        EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                        VALUES (%s::integer, %s::integer, %s, %s, %s, %s, %s, %s)
                        RETURNING EpisodeID
                    """
                else:
                    insert_query = """
                        INSERT INTO PeopleEpisodes
                        (PersonID, PodcastID, EpisodeTitle, EpisodeDescription,
                        EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration)
                        VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
                    """

                cursor.execute(insert_query, (
                    person_id,
                    podcast_id,
                    parsed_title,
                    parsed_description,
                    parsed_audio_url,
                    parsed_artwork_url,
                    parsed_release_datetime,
                    parsed_duration
                ))

                # Get the ID of the newly inserted episode
                if database_type == "postgresql":
                    new_episode_id = cursor.fetchone()[0]
                else:
                    cursor.execute('SELECT LAST_INSERT_ID()')
                    new_episode_id = cursor.fetchone()[0]

                processed_episodes.add(new_episode_id)

            except Exception as e:
                # Per-episode failures are non-fatal; skip and keep syncing.
                logging.debug(f"Skipping episode '{parsed_title}' during person podcast import - {str(e)}")
                continue

        # Clean up stored episodes that vanished from the feed, but only
        # once they are older than 30 days.
        episodes_to_delete = existing_episodes - processed_episodes
        if episodes_to_delete:
            if database_type == "postgresql":
                delete_query = """
                    DELETE FROM "PeopleEpisodes"
                    WHERE PersonID = %s::integer
                    AND PodcastID = %s::integer
                    AND EpisodeID = ANY(%s)
                    AND EpisodePubDate < NOW() - INTERVAL '30 days'
                """
                cursor.execute(delete_query, (person_id, podcast_id, list(episodes_to_delete)))
            else:
                # (A redundant inner emptiness re-check was removed here; the
                # outer `if episodes_to_delete:` already guarantees it.)
                placeholders = ','.join(['%s'] * len(episodes_to_delete))
                delete_query = f"""
                    DELETE FROM PeopleEpisodes
                    WHERE PersonID = %s
                    AND PodcastID = %s
                    AND EpisodeID IN ({placeholders})
                    AND EpisodePubDate < DATE_SUB(NOW(), INTERVAL 30 DAY)
                """
                cursor.execute(delete_query, (person_id, podcast_id) + tuple(episodes_to_delete))

        cnx.commit()

    except Exception as e:
        if database_type == "postgresql" and cursor is not None:
            cursor.execute("ROLLBACK")
        else:
            cnx.rollback()
        logging.error(f"Error processing feed {feed_url}: {str(e)}")
        raise

    finally:
        if cursor is not None:
            cursor.close()
-
def remove_youtube_channel_by_url(cnx, database_type, channel_name, channel_url, user_id):
    """Remove a user's YouTube channel subscription and all derived data.

    Looks the channel up by name + feed URL for the user, deletes the
    downloaded MP3s for each of its videos (best effort), removes every
    database reference to the videos, drops the Podcasts row, and
    decrements the user's PodcastsAdded stat. Commits on success; rolls
    back and re-raises on failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        channel_name: PodcastName of the channel row.
        channel_url: FeedURL of the channel row.
        user_id: Owning user's id.

    Raises:
        ValueError: When no matching YouTube channel exists for the user.
    """
    cursor = cnx.cursor()
    print('got to remove youtube channel')
    try:
        # Get the PodcastID first
        if database_type == "postgresql":
            select_podcast_id = '''
                SELECT PodcastID
                FROM "Podcasts"
                WHERE PodcastName = %s
                AND FeedURL = %s
                AND UserID = %s
                AND IsYouTubeChannel = TRUE
            '''
        else:  # MySQL or MariaDB
            select_podcast_id = '''
                SELECT PodcastID
                FROM Podcasts
                WHERE PodcastName = %s
                AND FeedURL = %s
                AND UserID = %s
                AND IsYouTubeChannel = TRUE
            '''
        cursor.execute(select_podcast_id, (channel_name, channel_url, user_id))
        result = cursor.fetchone()
        if result:
            # Rows may be tuples or dicts depending on cursor factory.
            podcast_id = result[0] if not isinstance(result, dict) else result.get('podcastid')
        else:
            raise ValueError(f"No YouTube channel found with name {channel_name}")

        # Get all video IDs for the podcast so we can delete the files
        if database_type == "postgresql":
            get_video_ids_query = 'SELECT YouTubeVideoID FROM "YouTubeVideos" WHERE PodcastID = %s'
        else:  # MySQL or MariaDB
            get_video_ids_query = "SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = %s"

        cursor.execute(get_video_ids_query, (podcast_id,))
        video_ids = cursor.fetchall()

        # Delete the MP3 files for each video (best effort; DB cleanup
        # continues even when a file is missing or cannot be removed)
        for video_id in video_ids:
            if isinstance(video_id, tuple):
                video_id_str = video_id[0]
            else:  # dict
                video_id_str = video_id["youtubevideoid"]

            # Delete the MP3 file
            file_paths = [
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3",
                f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3.mp3"  # In case of double extension
            ]

            for file_path in file_paths:
                if os.path.exists(file_path):
                    try:
                        os.remove(file_path)
                        print(f"Deleted file: {file_path}")
                    except Exception as e:
                        print(f"Failed to delete file {file_path}: {str(e)}")

        # Delete related data - now including all tables. Order matters:
        # referencing tables first, YouTubeVideos, then the Podcasts row.
        # NOTE(review): several deletes here match on the video's VideoID in
        # episode-keyed tables without an is_youtube filter (unlike
        # remove_old_youtube_videos) — confirm episode and video ID spaces
        # cannot collide.
        if database_type == "postgresql":
            delete_queries = [
                ('DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "UserVideoHistory" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "DownloadedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "SavedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)', (podcast_id,)),
                ('DELETE FROM "YouTubeVideos" WHERE PodcastID = %s', (podcast_id,)),
                ('DELETE FROM "Podcasts" WHERE PodcastID = %s AND IsYouTubeChannel = TRUE', (podcast_id,))
            ]
        else:  # MySQL or MariaDB
            delete_queries = [
                ("DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)", (podcast_id,)),
                ("DELETE FROM YouTubeVideos WHERE PodcastID = %s", (podcast_id,)),
                ("DELETE FROM Podcasts WHERE PodcastID = %s AND IsYouTubeChannel = TRUE", (podcast_id,))
            ]

        for query, params in delete_queries:
            cursor.execute(query, params)

        # Update UserStats table; GREATEST(... - 1, 0) keeps the counter
        # from going negative.
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "UPDATE UserStats SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s"
        cursor.execute(query, (user_id,))

        cnx.commit()
    except (psycopg.Error, mysql.connector.Error) as err:
        print(f"Database Error: {err}")
        cnx.rollback()
        raise
    except Exception as e:
        print(f"General Error in remove_youtube_channel_by_url: {e}")
        cnx.rollback()
        raise
    finally:
        cursor.close()
-
-def remove_podcast(cnx, database_type, podcast_name, podcast_url, user_id):
- cursor = cnx.cursor()
- print('got to remove')
- try:
- # Get the PodcastID first
- if database_type == "postgresql":
- select_podcast_id = 'SELECT PodcastID FROM "Podcasts" WHERE PodcastName = %s AND FeedURL = %s AND UserID = %s'
- else: # MySQL or MariaDB
- select_podcast_id = "SELECT PodcastID FROM Podcasts WHERE PodcastName = %s AND FeedURL = %s AND UserID = %s"
-
- cursor.execute(select_podcast_id, (podcast_name, podcast_url, user_id))
- result = cursor.fetchone()
-
- if result:
- podcast_id = result[0] if not isinstance(result, dict) else result.get('podcastid')
- else:
- raise ValueError(f"No podcast found with name {podcast_name}")
-
- # Special handling for initialization-added feeds
- if podcast_url == "https://news.pinepods.online/feed.xml":
- # First, delete all related entries manually to avoid foreign key issues
- if database_type == "postgresql":
- queries = [
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- 'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
- 'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
- 'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
- 'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
- 'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)',
- 'DELETE FROM "Episodes" WHERE PodcastID = %s',
- 'DELETE FROM "Podcasts" WHERE PodcastID = %s',
- 'UPDATE "AppSettings" SET NewsFeedSubscribed = FALSE'
- ]
- else: # MySQL or MariaDB
- queries = [
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
- "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
- "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
- "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
- "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)",
- "SET FOREIGN_KEY_CHECKS = 0",
- "DELETE FROM Episodes WHERE PodcastID = %s",
- "DELETE FROM Podcasts WHERE PodcastID = %s",
- "SET FOREIGN_KEY_CHECKS = 1",
- "UPDATE AppSettings SET NewsFeedSubscribed = 0"
- ]
-
- for query in queries:
- if query.startswith('SET'):
- cursor.execute(query)
- elif query.startswith('UPDATE'):
- cursor.execute(query)
- else:
- cursor.execute(query, (podcast_id,))
-
- else:
- # Normal podcast deletion process
- if database_type == "postgresql":
- delete_queries = [
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- ('DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
- ('DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
- ('DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
- ('DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
- ('DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)', (podcast_id,)),
- ('DELETE FROM "Episodes" WHERE PodcastID = %s', (podcast_id,)),
- ('DELETE FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,))
- ]
- else: # MySQL or MariaDB
- delete_queries = [
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- ("DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
- ("DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
- ("DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
- ("DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
- ("DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)", (podcast_id,)),
- ("DELETE FROM Episodes WHERE PodcastID = %s", (podcast_id,)),
- ("DELETE FROM Podcasts WHERE PodcastID = %s", (podcast_id,))
- ]
-
- for query, params in delete_queries:
- cursor.execute(query, params)
-
- # Update UserStats table to decrement PodcastsAdded count
- if database_type == "postgresql":
- query = 'UPDATE "UserStats" SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s'
- else: # MySQL or MariaDB
- query = "UPDATE UserStats SET PodcastsAdded = GREATEST(PodcastsAdded - 1, 0) WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- cnx.commit()
-
- except (psycopg.Error, mysql.connector.Error) as err:
- print(f"Database Error: {err}")
- cnx.rollback()
- raise
- except Exception as e:
- print(f"General Error in remove_podcast: {e}")
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
-
-def remove_podcast_id(cnx, database_type, podcast_id, user_id):
- cursor = cnx.cursor()
-
- try:
- # If there's no podcast ID found, raise an error or exit the function early
- if podcast_id is None:
- raise ValueError("No podcast found with ID {}".format(podcast_id))
-
- # Delete user episode history entries associated with the podcast
- if database_type == "postgresql":
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
- delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
- delete_downloaded = 'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
- delete_saved = 'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
- delete_queue = 'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s)'
- delete_episodes = 'DELETE FROM "Episodes" WHERE PodcastID = %s'
- delete_podcast = 'DELETE FROM "Podcasts" WHERE PodcastID = %s'
- update_user_stats = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s'
- else: # MySQL or MariaDB
- # DELETE FROM PLAYLIST CONTENTS - Add this first!
- delete_playlist_contents = "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
- delete_history = "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
- delete_downloaded = "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
- delete_saved = "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
- delete_queue = "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)"
- delete_episodes = "DELETE FROM Episodes WHERE PodcastID = %s"
- delete_podcast = "DELETE FROM Podcasts WHERE PodcastID = %s"
- update_user_stats = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s"
-
- # Execute the deletion statements in order
- cursor.execute(delete_playlist_contents, (podcast_id,))
- cursor.execute(delete_history, (podcast_id,))
- cursor.execute(delete_downloaded, (podcast_id,))
- cursor.execute(delete_saved, (podcast_id,))
- cursor.execute(delete_queue, (podcast_id,))
- cursor.execute(delete_episodes, (podcast_id,))
- cursor.execute(delete_podcast, (podcast_id,))
- cursor.execute(update_user_stats, (user_id,))
-
- cnx.commit()
- except (psycopg.Error, mysql.connector.Error) as err:
- print("Error: {}".format(err))
- cnx.rollback()
- finally:
- cursor.close()
-
-def remove_youtube_channel(cnx, database_type, podcast_id, user_id):
- cursor = cnx.cursor()
- try:
- # First, get all video IDs for the podcast so we can delete the files
- if database_type == "postgresql":
- get_video_ids_query = 'SELECT YouTubeVideoID FROM "YouTubeVideos" WHERE PodcastID = %s'
- else: # MySQL or MariaDB
- get_video_ids_query = "SELECT YouTubeVideoID FROM YouTubeVideos WHERE PodcastID = %s"
-
- cursor.execute(get_video_ids_query, (podcast_id,))
- video_ids = cursor.fetchall()
-
- # Delete the MP3 files for each video
- for video_id in video_ids:
- if isinstance(video_id, tuple):
- video_id_str = video_id[0]
- else: # dict
- video_id_str = video_id["youtubevideoid"]
-
- # Delete the MP3 file
- file_paths = [
- f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3",
- f"/opt/pinepods/downloads/youtube/{video_id_str}.mp3.mp3" # In case of double extension
- ]
-
- for file_path in file_paths:
- if os.path.exists(file_path):
- try:
- os.remove(file_path)
- print(f"Deleted file: {file_path}")
- except Exception as e:
- print(f"Failed to delete file {file_path}: {str(e)}")
-
- # Delete from the related tables
- if database_type == "postgresql":
- delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_video_history = 'DELETE FROM "UserVideoHistory" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_downloaded = 'DELETE FROM "DownloadedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_downloaded_videos = 'DELETE FROM "DownloadedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_saved_videos = 'DELETE FROM "SavedVideos" WHERE VideoID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_saved = 'DELETE FROM "SavedEpisodes" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_queue = 'DELETE FROM "EpisodeQueue" WHERE EpisodeID IN (SELECT VideoID FROM "YouTubeVideos" WHERE PodcastID = %s)'
- delete_videos = 'DELETE FROM "YouTubeVideos" WHERE PodcastID = %s'
- delete_podcast = 'DELETE FROM "Podcasts" WHERE PodcastID = %s AND IsYouTubeChannel = TRUE'
- update_user_stats = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s'
- else: # MySQL or MariaDB
- delete_playlist_contents = "DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_history = "DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_video_history = "DELETE FROM UserVideoHistory WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_downloaded = "DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_downloaded_videos = "DELETE FROM DownloadedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_saved_videos = "DELETE FROM SavedVideos WHERE VideoID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_saved = "DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_queue = "DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT VideoID FROM YouTubeVideos WHERE PodcastID = %s)"
- delete_videos = "DELETE FROM YouTubeVideos WHERE PodcastID = %s"
- delete_podcast = "DELETE FROM Podcasts WHERE PodcastID = %s AND IsYouTubeChannel = TRUE"
- update_user_stats = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s"
-
- # Execute the deletion statements in order
- cursor.execute(delete_playlist_contents, (podcast_id,))
- cursor.execute(delete_history, (podcast_id,))
- cursor.execute(delete_video_history, (podcast_id,))
- cursor.execute(delete_downloaded, (podcast_id,))
- cursor.execute(delete_downloaded_videos, (podcast_id,))
- cursor.execute(delete_saved_videos, (podcast_id,))
- cursor.execute(delete_saved, (podcast_id,))
- cursor.execute(delete_queue, (podcast_id,))
- cursor.execute(delete_videos, (podcast_id,))
- cursor.execute(delete_podcast, (podcast_id,))
- cursor.execute(update_user_stats, (user_id,))
-
- cnx.commit()
- except (psycopg.Error, mysql.connector.Error) as err:
- print("Error: {}".format(err))
- cnx.rollback()
- finally:
- cursor.close()
-
-def return_episodes(database_type, cnx, user_id):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = """
- SELECT * FROM (
- SELECT
- "Podcasts".PodcastName as podcastname,
- "Episodes".EpisodeTitle as episodetitle,
- "Episodes".EpisodePubDate as episodepubdate,
- "Episodes".EpisodeDescription as episodedescription,
- "Episodes".EpisodeArtwork as episodeartwork,
- "Episodes".EpisodeURL as episodeurl,
- "Episodes".EpisodeDuration as episodeduration,
- "UserEpisodeHistory".ListenDuration as listenduration,
- "Episodes".EpisodeID as episodeid,
- "Episodes".Completed as completed,
- CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
- FALSE as is_youtube
- FROM "Episodes"
- INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "UserEpisodeHistory" ON
- "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
- AND "UserEpisodeHistory".UserID = %s
- LEFT JOIN "SavedEpisodes" ON
- "Episodes".EpisodeID = "SavedEpisodes".EpisodeID
- AND "SavedEpisodes".UserID = %s
- LEFT JOIN "EpisodeQueue" ON
- "Episodes".EpisodeID = "EpisodeQueue".EpisodeID
- AND "EpisodeQueue".UserID = %s
- LEFT JOIN "DownloadedEpisodes" ON
- "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID
- AND "DownloadedEpisodes".UserID = %s
- WHERE "Episodes".EpisodePubDate >= NOW() - INTERVAL '30 days'
- AND "Podcasts".UserID = %s
-
- UNION ALL
-
- SELECT
- "Podcasts".PodcastName as podcastname,
- "YouTubeVideos".VideoTitle as episodetitle,
- "YouTubeVideos".PublishedAt as episodepubdate,
- "YouTubeVideos".VideoDescription as episodedescription,
- "YouTubeVideos".ThumbnailURL as episodeartwork,
- "YouTubeVideos".VideoURL as episodeurl,
- "YouTubeVideos".Duration as episodeduration,
- "YouTubeVideos".ListenPosition as listenduration,
- "YouTubeVideos".VideoID as episodeid,
- "YouTubeVideos".Completed as completed,
- CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
- TRUE as is_youtube
- FROM "YouTubeVideos"
- INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "SavedVideos" ON
- "YouTubeVideos".VideoID = "SavedVideos".VideoID
- AND "SavedVideos".UserID = %s
- LEFT JOIN "EpisodeQueue" ON
- "YouTubeVideos".VideoID = "EpisodeQueue".EpisodeID
- AND "EpisodeQueue".UserID = %s
- AND "EpisodeQueue".is_youtube = TRUE
- LEFT JOIN "DownloadedVideos" ON
- "YouTubeVideos".VideoID = "DownloadedVideos".VideoID
- AND "DownloadedVideos".UserID = %s
- WHERE "YouTubeVideos".PublishedAt >= NOW() - INTERVAL '30 days'
- AND "Podcasts".UserID = %s
- ) combined
- ORDER BY episodepubdate DESC
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = """
- SELECT * FROM (
- SELECT
- Podcasts.PodcastName as podcastname,
- Episodes.EpisodeTitle as episodetitle,
- Episodes.EpisodePubDate as episodepubdate,
- Episodes.EpisodeDescription as episodedescription,
- Episodes.EpisodeArtwork as episodeartwork,
- Episodes.EpisodeURL as episodeurl,
- Episodes.EpisodeDuration as episodeduration,
- UserEpisodeHistory.ListenDuration as listenduration,
- Episodes.EpisodeID as episodeid,
- Episodes.Completed as completed,
- CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
- FALSE as is_youtube
- FROM Episodes
- INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- LEFT JOIN UserEpisodeHistory ON
- Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
- AND UserEpisodeHistory.UserID = %s
- LEFT JOIN SavedEpisodes ON
- Episodes.EpisodeID = SavedEpisodes.EpisodeID
- AND SavedEpisodes.UserID = %s
- LEFT JOIN EpisodeQueue ON
- Episodes.EpisodeID = EpisodeQueue.EpisodeID
- AND EpisodeQueue.UserID = %s
- LEFT JOIN DownloadedEpisodes ON
- Episodes.EpisodeID = DownloadedEpisodes.EpisodeID
- AND DownloadedEpisodes.UserID = %s
- WHERE Episodes.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 30 DAY)
- AND Podcasts.UserID = %s
-
- UNION ALL
-
- SELECT
- Podcasts.PodcastName as podcastname,
- YouTubeVideos.VideoTitle as episodetitle,
- YouTubeVideos.PublishedAt as episodepubdate,
- YouTubeVideos.VideoDescription as episodedescription,
- YouTubeVideos.ThumbnailURL as episodeartwork,
- YouTubeVideos.VideoURL as episodeurl,
- YouTubeVideos.Duration as episodeduration,
- YouTubeVideos.ListenPosition as listenduration,
- YouTubeVideos.VideoID as episodeid,
- YouTubeVideos.Completed as completed,
- CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
- CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued,
- CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
- 1 as is_youtube
- FROM YouTubeVideos
- INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
- LEFT JOIN SavedVideos ON
- YouTubeVideos.VideoID = SavedVideos.VideoID
- AND SavedVideos.UserID = %s
- LEFT JOIN EpisodeQueue ON
- YouTubeVideos.VideoID = EpisodeQueue.EpisodeID
- AND EpisodeQueue.UserID = %s
- AND EpisodeQueue.is_youtube = 1
- LEFT JOIN DownloadedVideos ON
- YouTubeVideos.VideoID = DownloadedVideos.VideoID
- AND DownloadedVideos.UserID = %s
- WHERE YouTubeVideos.PublishedAt >= DATE_SUB(NOW(), INTERVAL 30 DAY)
- AND Podcasts.UserID = %s
- ) combined
- ORDER BY episodepubdate DESC
- """
-
- # Execute with all params for both unions
- params = (user_id,) * 9 # user_id repeated 9 times for all the places needed
- cursor.execute(query, params)
- rows = cursor.fetchall()
- cursor.close()
-
- if not rows:
- return []
-
- if database_type != "postgresql":
- # Convert column names to lowercase for MySQL and ensure boolean fields are actual booleans
- bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
- rows = [{k.lower(): (bool(v) if k.lower() in bool_fields else v)
- for k, v in row.items()} for row in rows]
-
- return rows
-
-def return_person_episodes(database_type, cnx, user_id: int, person_id: int):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else:
- cursor = cnx.cursor(dictionary=True)
-
- try:
- if database_type == "postgresql":
- query = """
- SELECT
- e.EpisodeID, -- Will be NULL if no match in Episodes table
- pe.EpisodeTitle,
- pe.EpisodeDescription,
- pe.EpisodeURL,
- CASE
- WHEN pe.EpisodeArtwork IS NULL THEN
- (SELECT ArtworkURL FROM "Podcasts" WHERE PodcastID = pe.PodcastID)
- ELSE pe.EpisodeArtwork
- END as EpisodeArtwork,
- pe.EpisodePubDate,
- pe.EpisodeDuration,
- p.PodcastName,
- CASE
- WHEN (
- SELECT 1 FROM "Podcasts"
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ) IS NOT NULL THEN
- CASE
- WHEN s.EpisodeID IS NOT NULL THEN TRUE
- ELSE FALSE
- END
- ELSE FALSE
- END AS Saved,
- CASE
- WHEN (
- SELECT 1 FROM "Podcasts"
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ) IS NOT NULL THEN
- CASE
- WHEN d.EpisodeID IS NOT NULL THEN TRUE
- ELSE FALSE
- END
- ELSE FALSE
- END AS Downloaded,
- CASE
- WHEN (
- SELECT 1 FROM "Podcasts"
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ) IS NOT NULL THEN
- COALESCE(h.ListenDuration, 0)
- ELSE 0
- END AS ListenDuration,
- FALSE as is_youtube
- FROM "PeopleEpisodes" pe
- INNER JOIN "People" pp ON pe.PersonID = pp.PersonID
- INNER JOIN "Podcasts" p ON pe.PodcastID = p.PodcastID
- LEFT JOIN "Episodes" e ON e.EpisodeURL = pe.EpisodeURL AND e.PodcastID = pe.PodcastID
- LEFT JOIN (
- SELECT * FROM "SavedEpisodes" WHERE UserID = %s
- ) s ON s.EpisodeID = e.EpisodeID
- LEFT JOIN (
- SELECT * FROM "DownloadedEpisodes" WHERE UserID = %s
- ) d ON d.EpisodeID = e.EpisodeID
- LEFT JOIN (
- SELECT * FROM "UserEpisodeHistory" WHERE UserID = %s
- ) h ON h.EpisodeID = e.EpisodeID
- WHERE pe.PersonID = %s
- AND pe.EpisodePubDate >= NOW() - INTERVAL '30 days'
- ORDER BY pe.EpisodePubDate DESC;
- """
- else:
- query = """
- SELECT
- e.EpisodeID, -- Will be NULL if no match in Episodes table
- pe.EpisodeTitle,
- pe.EpisodeDescription,
- pe.EpisodeURL,
- COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as EpisodeArtwork,
- pe.EpisodePubDate,
- pe.EpisodeDuration,
- p.PodcastName,
- IF(
- EXISTS(
- SELECT 1 FROM Podcasts
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ),
- IF(s.EpisodeID IS NOT NULL, TRUE, FALSE),
- FALSE
- ) AS Saved,
- IF(
- EXISTS(
- SELECT 1 FROM Podcasts
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ),
- IF(d.EpisodeID IS NOT NULL, TRUE, FALSE),
- FALSE
- ) AS Downloaded,
- IF(
- EXISTS(
- SELECT 1 FROM Podcasts
- WHERE PodcastID = pe.PodcastID
- AND UserID = %s
- ),
- COALESCE(h.ListenDuration, 0),
- 0
- ) AS ListenDuration,
- FALSE as is_youtube
- FROM PeopleEpisodes pe
- INNER JOIN People pp ON pe.PersonID = pp.PersonID
- INNER JOIN Podcasts p ON pe.PodcastID = p.PodcastID
- LEFT JOIN Episodes e ON e.EpisodeURL = pe.EpisodeURL AND e.PodcastID = pe.PodcastID
- LEFT JOIN (
- SELECT * FROM SavedEpisodes WHERE UserID = %s
- ) s ON s.EpisodeID = e.EpisodeID
- LEFT JOIN (
- SELECT * FROM DownloadedEpisodes WHERE UserID = %s
- ) d ON d.EpisodeID = e.EpisodeID
- LEFT JOIN (
- SELECT * FROM UserEpisodeHistory WHERE UserID = %s
- ) h ON h.EpisodeID = e.EpisodeID
- WHERE pe.PersonID = %s
- AND pe.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 30 DAY)
- ORDER BY pe.EpisodePubDate DESC;
- """
-
- cursor.execute(query, (user_id,) * 6 + (person_id,))
- rows = cursor.fetchall()
-
- if not rows:
- return []
-
- if database_type != "postgresql":
- rows = [{k.lower(): (bool(v) if k.lower() in ['saved', 'downloaded'] else v)
- for k, v in row.items()} for row in rows]
-
- return rows
-
- except Exception as e:
- print(f"Error fetching person episodes: {e}")
- return None
- finally:
- cursor.close()
-
-def return_podcast_episodes(database_type, cnx, user_id, podcast_id):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else: # Assuming MariaDB/MySQL if not PostgreSQL
- cursor = cnx.cursor(dictionary=True)
-
- if database_type == "postgresql":
- query = (
- 'SELECT "Podcasts".PodcastID, "Podcasts".PodcastName, "Episodes".EpisodeID, '
- '"Episodes".EpisodeTitle, "Episodes".EpisodePubDate, "Episodes".EpisodeDescription, '
- '"Episodes".EpisodeArtwork, "Episodes".EpisodeURL, "Episodes".EpisodeDuration, '
- '"Episodes".Completed, '
- '"UserEpisodeHistory".ListenDuration, CAST("Episodes".EpisodeID AS VARCHAR) AS guid '
- 'FROM "Episodes" '
- 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID '
- 'LEFT JOIN "UserEpisodeHistory" ON "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID AND "UserEpisodeHistory".UserID = %s '
- 'WHERE "Podcasts".PodcastID = %s AND "Podcasts".UserID = %s '
- 'ORDER BY "Episodes".EpisodePubDate DESC'
- )
- else: # MySQL or MariaDB
- query = (
- "SELECT Podcasts.PodcastID, Podcasts.PodcastName, Episodes.EpisodeID, "
- "Episodes.EpisodeTitle, Episodes.EpisodePubDate, Episodes.EpisodeDescription, "
- "Episodes.EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration, "
- "Episodes.Completed, "
- "UserEpisodeHistory.ListenDuration, CAST(Episodes.EpisodeID AS CHAR) AS guid "
- "FROM Episodes "
- "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID "
- "LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID AND UserEpisodeHistory.UserID = %s "
- "WHERE Podcasts.PodcastID = %s AND Podcasts.UserID = %s "
- "ORDER BY Episodes.EpisodePubDate DESC"
- )
-
- cursor.execute(query, (user_id, podcast_id, user_id))
- rows = cursor.fetchall()
- cursor.close()
-
- # Normalize keys
- rows = capitalize_keys(rows)
-
- if database_type != "postgresql":
- for row in rows:
- row['Completed'] = bool(row['Completed'])
-
- return rows or None
-
-def return_youtube_episodes(database_type, cnx, user_id, podcast_id):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else: # Assuming MariaDB/MySQL if not PostgreSQL
- cursor = cnx.cursor(dictionary=True)
-
- if database_type == "postgresql":
- query = (
- 'SELECT "Podcasts".PodcastID, "Podcasts".PodcastName, "YouTubeVideos".VideoID AS EpisodeID, '
- '"YouTubeVideos".VideoTitle AS EpisodeTitle, "YouTubeVideos".PublishedAt AS EpisodePubDate, '
- '"YouTubeVideos".VideoDescription AS EpisodeDescription, '
- '"YouTubeVideos".ThumbnailURL AS EpisodeArtwork, "YouTubeVideos".VideoURL AS EpisodeURL, '
- '"YouTubeVideos".Duration AS EpisodeDuration, '
- '"YouTubeVideos".ListenPosition AS ListenDuration, '
- '"YouTubeVideos".YouTubeVideoID AS guid '
- 'FROM "YouTubeVideos" '
- 'INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID '
- 'WHERE "Podcasts".PodcastID = %s AND "Podcasts".UserID = %s '
- 'ORDER BY "YouTubeVideos".PublishedAt DESC'
- )
- else: # MySQL or MariaDB
- query = (
- "SELECT Podcasts.PodcastID, Podcasts.PodcastName, YouTubeVideos.VideoID AS EpisodeID, "
- "YouTubeVideos.VideoTitle AS EpisodeTitle, YouTubeVideos.PublishedAt AS EpisodePubDate, "
- "YouTubeVideos.VideoDescription AS EpisodeDescription, "
- "YouTubeVideos.ThumbnailURL AS EpisodeArtwork, YouTubeVideos.VideoURL AS EpisodeURL, "
- "YouTubeVideos.Duration AS EpisodeDuration, "
- "YouTubeVideos.ListenPosition AS ListenDuration, "
- "YouTubeVideos.YouTubeVideoID AS guid "
- "FROM YouTubeVideos "
- "INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID "
- "WHERE Podcasts.PodcastID = %s AND Podcasts.UserID = %s "
- "ORDER BY YouTubeVideos.PublishedAt DESC"
- )
-
- cursor.execute(query, (podcast_id, user_id))
- rows = cursor.fetchall()
- cursor.close()
-
- # Normalize keys
- rows = capitalize_keys(rows)
- return rows or None
-
-def get_podcast_details(database_type, cnx, user_id, podcast_id):
- if isinstance(podcast_id, tuple):
- pod_id, episode_id = podcast_id
- else:
- pod_id = podcast_id
- episode_id = None
-
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else:
- cursor = cnx.cursor(dictionary=True)
-
- if database_type == "postgresql":
- query = """
- SELECT *
- FROM "Podcasts"
- WHERE PodcastID = %s AND UserID = %s
- """
- else:
- query = """
- SELECT *
- FROM Podcasts
- WHERE PodcastID = %s AND UserID = %s
- """
-
- cursor.execute(query, (pod_id, user_id))
- details = cursor.fetchone()
-
- if not details:
- cursor.execute(query, (pod_id, 1))
- details = cursor.fetchone()
-
- if details:
- lower_row = lowercase_keys(details)
-
- # Only get count from YouTubeVideos if this is a YouTube channel
- if lower_row.get('isyoutubechannel', False):
- if database_type == "postgresql":
- count_query = """
- SELECT COUNT(*) as count
- FROM "YouTubeVideos"
- WHERE PodcastID = %s
- """
- else:
- count_query = """
- SELECT COUNT(*) as count
- FROM YouTubeVideos
- WHERE PodcastID = %s
- """
-
- cursor.execute(count_query, (pod_id,))
- count_result = cursor.fetchone()
- episode_count = count_result['count'] if isinstance(count_result, dict) else count_result[0]
- lower_row['episodecount'] = episode_count
-
- if database_type != "postgresql":
- lower_row['explicit'] = bool(lower_row.get('explicit', 0))
- lower_row['isyoutubechannel'] = bool(lower_row.get('isyoutubechannel', 0))
- # You might also want to handle autodownload if it's used in the frontend
- lower_row['autodownload'] = bool(lower_row.get('autodownload', 0))
-
- bool_fix = convert_bools(lower_row, database_type)
- cursor.close()
- return bool_fix
-
- cursor.close()
- return None
-
-
-def get_podcast_id(database_type, cnx, user_id, podcast_feed, podcast_name):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else: # Assuming MariaDB/MySQL if not PostgreSQL
- cursor = cnx.cursor(dictionary=True)
-
- if database_type == "postgresql":
- query = (
- 'SELECT PodcastID '
- 'FROM "Podcasts" '
- 'WHERE FeedURL = %s AND PodcastName = %s AND UserID = %s'
- )
- else: # MySQL or MariaDB
- query = (
- "SELECT PodcastID "
- "FROM Podcasts "
- "WHERE FeedURL = %s AND PodcastName = %s AND UserID = %s"
- )
-
- cursor.execute(query, (podcast_feed, podcast_name, user_id))
- row = cursor.fetchone() # Fetching only one row as we expect a single result
-
- cursor.close()
-
- if not row:
- return None
-
- if database_type == "postgresql":
- return row['podcastid'] # Assuming the column name is 'PodcastID'
- else:
- return row['PodcastID'] # Assuming the column name is 'PodcastID'
-
-def get_location_value(result, key, default=None):
- """
- Helper function to extract value from result set.
- It handles both dictionaries and tuples.
- """
- key_lower = key.lower()
- if isinstance(result, dict):
- return result.get(key_lower, default)
- elif isinstance(result, tuple):
- # Define a mapping of field names to their tuple indices for your specific queries
- key_map = {
- "downloadid": 0,
- "downloadedlocation": 1
- }
- index = key_map.get(key_lower)
- return result[index] if index is not None else default
- return default
-
-def delete_episode(database_type, cnx, episode_id, user_id, is_youtube=False):
- cursor = cnx.cursor()
- try:
- if is_youtube:
- # Get the download ID from the DownloadedVideos table
- if database_type == "postgresql":
- query = (
- 'SELECT DownloadID, DownloadedLocation '
- 'FROM "DownloadedVideos" '
- 'INNER JOIN "YouTubeVideos" ON "DownloadedVideos".VideoID = "YouTubeVideos".VideoID '
- 'INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID '
- 'WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s'
- )
- else:
- query = (
- "SELECT DownloadID, DownloadedLocation "
- "FROM DownloadedVideos "
- "INNER JOIN YouTubeVideos ON DownloadedVideos.VideoID = YouTubeVideos.VideoID "
- "INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID "
- "WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s"
- )
- else:
- # Original podcast episode query
- if database_type == "postgresql":
- query = (
- 'SELECT DownloadID, DownloadedLocation '
- 'FROM "DownloadedEpisodes" '
- 'INNER JOIN "Episodes" ON "DownloadedEpisodes".EpisodeID = "Episodes".EpisodeID '
- 'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID '
- 'WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s'
- )
- else:
- query = (
- "SELECT DownloadID, DownloadedLocation "
- "FROM DownloadedEpisodes "
- "INNER JOIN Episodes ON DownloadedEpisodes.EpisodeID = Episodes.EpisodeID "
- "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID "
- "WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s"
- )
-
- cursor.execute(query, (episode_id, user_id))
- result = cursor.fetchone()
- logging.debug(f"Query result: {result}")
-
- if not result:
- logging.warning("No matching download found.")
- cursor.close()
- return
-
- download_id = get_location_value(result, "DownloadID")
- downloaded_location = get_location_value(result, "DownloadedLocation")
- logging.debug(f"DownloadID: {download_id}, DownloadedLocation: {downloaded_location}")
-
- # Delete the downloaded file (but not source YouTube file)
- if downloaded_location and os.path.exists(downloaded_location):
- if is_youtube:
- # Only delete if it's not in the YouTube source directory
- if not downloaded_location.startswith("/opt/pinepods/downloads/youtube/"):
- os.remove(downloaded_location)
- else:
- os.remove(downloaded_location)
- else:
- logging.warning(f"Downloaded file not found: {downloaded_location}")
-
- # Remove the entry from the appropriate downloads table
- if is_youtube:
- if database_type == "postgresql":
- query = 'DELETE FROM "DownloadedVideos" WHERE DownloadID = %s'
- else:
- query = "DELETE FROM DownloadedVideos WHERE DownloadID = %s"
- else:
- if database_type == "postgresql":
- query = 'DELETE FROM "DownloadedEpisodes" WHERE DownloadID = %s'
- else:
- query = "DELETE FROM DownloadedEpisodes WHERE DownloadID = %s"
-
- cursor.execute(query, (download_id,))
- cnx.commit()
- logging.info(f"Removed {cursor.rowcount} entry from the downloads table.")
-
- # Update UserStats table
- if database_type == "postgresql":
- query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s'
- else:
- query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- cnx.commit()
-
- except Exception as e:
- logging.error(f"Error during episode deletion: {e}")
- cnx.rollback()
- finally:
- cursor.close()
-
def return_pods(database_type, cnx, user_id):
    """Return every podcast owned by user_id, with per-podcast play stats.

    Produces one row per podcast with display fields (name, artwork,
    description, ...), play_count (distinct history entries), the oldest
    episode publish date, and episodes_played (distinct episodes with any
    listen history). NULL/empty columns are defaulted twice: once in SQL
    via COALESCE(NULLIF(...)) and again in Python below.

    Args:
        database_type: "postgresql" or a MySQL/MariaDB variant.
        cnx: Open database connection.
        user_id: Owner whose podcasts (and listen history) are queried.

    Returns:
        List of dicts with lowercase keys; [] on query error or no rows.
    """
    if database_type == "postgresql":
        # NOTE(review): this mutates the shared connection's row_factory, so
        # any later cursor on this connection also yields dict rows —
        # confirm that side effect is intended.
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:
        cursor = cnx.cursor(dictionary=True)

    # Base query remains the same but handles nulls and empty strings with NULLIF
    if database_type == "postgresql":
        query = """
            SELECT
                p.PodcastID,
                COALESCE(NULLIF(p.PodcastName, ''), 'Unknown Podcast') as PodcastName,
                COALESCE(NULLIF(p.ArtworkURL, ''), '/static/assets/default-podcast.png') as ArtworkURL,
                COALESCE(NULLIF(p.Description, ''), 'No description available') as Description,
                COALESCE(p.EpisodeCount, 0) as EpisodeCount,
                COALESCE(NULLIF(p.WebsiteURL, ''), '') as WebsiteURL,
                COALESCE(NULLIF(p.FeedURL, ''), '') as FeedURL,
                COALESCE(NULLIF(p.Author, ''), 'Unknown Author') as Author,
                COALESCE(NULLIF(p.Categories, ''), '') as Categories,
                COALESCE(p.Explicit, FALSE) as Explicit,
                COALESCE(p.PodcastIndexID, 0) as PodcastIndexID,
                COUNT(DISTINCT h.UserEpisodeHistoryID) as play_count,
                MIN(e.EpisodePubDate) as oldest_episode_date,
                COALESCE(
                    (SELECT COUNT(DISTINCT ueh.EpisodeID)
                    FROM "UserEpisodeHistory" ueh
                    JOIN "Episodes" ep ON ueh.EpisodeID = ep.EpisodeID
                    WHERE ep.PodcastID = p.PodcastID
                    AND ueh.UserID = %s),
                    0
                ) as episodes_played
            FROM "Podcasts" p
            LEFT JOIN "Episodes" e ON p.PodcastID = e.PodcastID
            LEFT JOIN "UserEpisodeHistory" h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s
            WHERE p.UserID = %s
            GROUP BY p.PodcastID
        """
    else:  # MySQL/MariaDB version
        query = """
            SELECT
                p.PodcastID,
                COALESCE(NULLIF(p.PodcastName, ''), 'Unknown Podcast') as PodcastName,
                COALESCE(NULLIF(p.ArtworkURL, ''), '/static/assets/default-podcast.png') as ArtworkURL,
                COALESCE(NULLIF(p.Description, ''), 'No description available') as Description,
                COALESCE(p.EpisodeCount, 0) as EpisodeCount,
                COALESCE(NULLIF(p.WebsiteURL, ''), '') as WebsiteURL,
                COALESCE(NULLIF(p.FeedURL, ''), '') as FeedURL,
                COALESCE(NULLIF(p.Author, ''), 'Unknown Author') as Author,
                COALESCE(NULLIF(p.Categories, ''), '') as Categories,
                COALESCE(p.Explicit, FALSE) as Explicit,
                COALESCE(p.PodcastIndexID, 0) as PodcastIndexID,
                COUNT(DISTINCT h.UserEpisodeHistoryID) as play_count,
                MIN(e.EpisodePubDate) as oldest_episode_date,
                COALESCE(
                    (SELECT COUNT(DISTINCT ueh.EpisodeID)
                    FROM UserEpisodeHistory ueh
                    JOIN Episodes ep ON ueh.EpisodeID = ep.EpisodeID
                    WHERE ep.PodcastID = p.PodcastID
                    AND ueh.UserID = %s),
                    0
                ) as episodes_played
            FROM Podcasts p
            LEFT JOIN Episodes e ON p.PodcastID = e.PodcastID
            LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s
            WHERE p.UserID = %s
            GROUP BY p.PodcastID
        """

    try:
        # Three placeholders: episodes_played subquery, history join, ownership filter.
        cursor.execute(query, (user_id, user_id, user_id))
        rows = cursor.fetchall()
    except Exception as e:
        logging.error(f"Database error in return_pods: {str(e)}")
        return []
    finally:
        cursor.close()

    if not rows:
        return []

    # Process all rows, regardless of database type
    processed_rows = []
    for row in rows:
        # Convert to lowercase keys for consistency
        processed_row = {k.lower(): v for k, v in row.items()}

        # Define default values
        # NOTE(review): the Python-side artwork default below differs from the
        # SQL-side default ('/static/assets/default-podcast.png') — confirm
        # which fallback image is canonical.
        defaults = {
            'podcastname': 'Unknown Podcast',
            'artworkurl': '/static/assets/logo_random/11.jpeg',
            'description': 'No description available',
            'episodecount': 0,
            'websiteurl': '',
            'feedurl': '',
            'author': 'Unknown Author',
            'categories': '',
            'explicit': False,
            'podcastindexid': 0,
            'play_count': 0,
            'episodes_played': 0
        }

        # Apply defaults for any missing or null values
        for key, default_value in defaults.items():
            if key not in processed_row or processed_row[key] is None or processed_row[key] == "":
                processed_row[key] = default_value

        processed_rows.append(processed_row)

    return processed_rows
-
def check_self_service(cnx, database_type):
    """Return whether self-service user creation is enabled.

    Reads the SelfServiceUser flag from the AppSettings table.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.

    Returns:
        True when the flag is 1/True, False when 0/False, and None when
        the table is empty or the flag holds an unexpected value.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT SelfServiceUser FROM "AppSettings"'
        else:  # MySQL or MariaDB
            query = "SELECT SelfServiceUser FROM AppSettings"
        cursor.execute(query)
        result = cursor.fetchone()
    finally:
        cursor.close()

    # Guard against an empty AppSettings table (previously a TypeError on
    # `result[0]`).
    if result is None:
        return None

    logging.debug(f'debug result: {result}')
    # Dict-style rows (e.g. psycopg dict_row) use lowercase column keys;
    # tuple rows expose the flag at index 0. Handle both for either backend.
    if isinstance(result, dict):
        self_service = result.get('selfserviceuser', result.get('SelfServiceUser'))
    else:
        self_service = result[0]

    # Booleans compare equal to 1/0, so PostgreSQL boolean columns work too.
    if self_service == 1:
        return True
    elif self_service == 0:
        return False
    else:
        return None
-
def refresh_pods_for_user(cnx, database_type, podcast_id):
    """Refresh a single podcast's feed (RSS or YouTube) for its owner.

    Loads the podcast's feed settings, then either re-scans the YouTube
    channel (when IsYouTubeChannel is set) or pulls new RSS episodes via
    add_episodes with websocket notifications enabled.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: ID of the podcast to refresh.

    Returns:
        List of newly added episodes (empty for YouTube channels, which
        are processed without collecting results here).
    """
    print(f'Refresh begin for podcast {podcast_id}')
    cursor = cnx.cursor()
    if database_type == "postgresql":
        # NOTE(review): this query quotes lowercase identifiers
        # ("podcastid", "feedurl", ...) while other queries in this module
        # use unquoted CamelCase — confirm the actual column case in the
        # PostgreSQL schema, since quoted identifiers are case-sensitive.
        select_podcast = '''
            SELECT "podcastid", "feedurl", "artworkurl", "autodownload", "username", "password",
                "isyoutubechannel", COALESCE("feedurl", '') as channel_id, "feedcutoffdays"
            FROM "Podcasts"
            WHERE "podcastid" = %s
        '''
    else:  # MySQL or MariaDB
        select_podcast = '''
            SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password,
                IsYouTubeChannel, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays
            FROM Podcasts
            WHERE PodcastID = %s
        '''
    cursor.execute(select_podcast, (podcast_id,))
    result = cursor.fetchone()
    new_episodes = []

    if result:
        if isinstance(result, dict):
            if database_type == "postgresql":
                # PostgreSQL - lowercase keys
                podcast_id = result['podcastid']
                feed_url = result['feedurl']
                artwork_url = result['artworkurl']
                auto_download = result['autodownload']
                username = result['username']
                password = result['password']
                is_youtube = result['isyoutubechannel']
                channel_id = result['channel_id']
                feed_cutoff = result['feedcutoffdays']
            else:
                # MariaDB - uppercase keys
                podcast_id = result['PodcastID']
                feed_url = result['FeedURL']
                artwork_url = result['ArtworkURL']
                auto_download = result['AutoDownload']
                username = result['Username']
                password = result['Password']
                is_youtube = result['IsYouTubeChannel']
                channel_id = result['channel_id']
                feed_cutoff = result['FeedCutoffDays']
        else:
            # Tuple rows: positions match the SELECT column order above.
            podcast_id, feed_url, artwork_url, auto_download, username, password, is_youtube, channel_id, feed_cutoff = result

        print(f'Processing podcast: {podcast_id}')
        if is_youtube:
            # Derive the channel ID from the feed URL: take everything after
            # 'channel/' and strip any trailing path or query string.
            channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url
            channel_id = channel_id.split('/')[0].split('?')[0]
            youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff)
        else:
            # websocket=True so the caller can stream progress to the client.
            episodes = add_episodes(cnx, database_type, podcast_id, feed_url,
                                    artwork_url, auto_download,
                                    username=username, password=password, websocket=True)
            new_episodes.extend(episodes)

    cursor.close()
    return new_episodes
-
-
def refresh_pods(cnx, database_type):
    """Refresh every podcast in the system (RSS and YouTube channels).

    Iterates all rows in Podcasts; failures on one podcast are logged via
    print and do not abort the remaining refreshes.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
    """
    print('refresh begin')
    cursor = cnx.cursor()
    if database_type == "postgresql":
        # NOTE(review): unlike refresh_pods_for_user, the PostgreSQL query
        # here uses unquoted CamelCase column names — confirm both resolve
        # against the same schema.
        select_podcasts = '''
            SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password,
                IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays
            FROM "Podcasts"
        '''
    else:
        select_podcasts = '''
            SELECT PodcastID, FeedURL, ArtworkURL, AutoDownload, Username, Password,
                IsYouTubeChannel, UserID, COALESCE(FeedURL, '') as channel_id, FeedCutoffDays
            FROM Podcasts
        '''
    cursor.execute(select_podcasts)
    result_set = cursor.fetchall()
    for result in result_set:
        # Pre-bind so the except block can report the ID even if extraction fails.
        podcast_id = None
        try:
            if isinstance(result, tuple):
                podcast_id, feed_url, artwork_url, auto_download, username, password, is_youtube, user_id, channel_id, feed_cutoff = result
            elif isinstance(result, dict):
                if database_type == "postgresql":
                    # PostgreSQL dict rows use lowercase keys.
                    podcast_id = result["podcastid"]
                    feed_url = result["feedurl"]
                    artwork_url = result["artworkurl"]
                    auto_download = result["autodownload"]
                    username = result["username"]
                    password = result["password"]
                    is_youtube = result["isyoutubechannel"]
                    user_id = result["userid"]
                    channel_id = result["channel_id"]
                    feed_cutoff = result["feedcutoffdays"]
                else:
                    # MySQL/MariaDB dict rows keep the original column case.
                    podcast_id = result["PodcastID"]
                    feed_url = result["FeedURL"]
                    artwork_url = result["ArtworkURL"]
                    auto_download = result["AutoDownload"]
                    username = result["Username"]
                    password = result["Password"]
                    is_youtube = result["IsYouTubeChannel"]
                    user_id = result["UserID"]
                    channel_id = result["channel_id"]
                    feed_cutoff = result["FeedCutoffDays"]
            else:
                raise ValueError(f"Unexpected result type: {type(result)}")
            print(f'Running for: {podcast_id}')
            if is_youtube:
                # Extract channel ID from feed URL
                channel_id = feed_url.split('channel/')[-1] if 'channel/' in feed_url else feed_url
                # Clean up any trailing slashes or query parameters
                channel_id = channel_id.split('/')[0].split('?')[0]
                youtube.process_youtube_videos(database_type, podcast_id, channel_id, cnx, feed_cutoff)
            else:
                # websocket=False: background refresh, no client notifications.
                add_episodes(cnx, database_type, podcast_id, feed_url, artwork_url,
                            auto_download, username=username, password=password, websocket=False)
        except Exception as e:
            # One bad feed must not stop the whole refresh run.
            print(f"Error refreshing podcast {podcast_id}: {str(e)}")
            continue
    cursor.close()
-
-
-
def remove_unavailable_episodes(cnx, database_type):
    """Delete episodes whose media URL now returns HTTP 404.

    Issues a HEAD request for every episode URL in the Episodes table and
    removes rows whose URL 404s. Any other outcome (timeout, DNS error,
    non-404 status) leaves the episode untouched and is only logged.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
    """
    cursor = cnx.cursor()

    # select all episodes
    if database_type == "postgresql":
        select_episodes = 'SELECT EpisodeID, PodcastID, EpisodeTitle, EpisodeURL, EpisodePubDate FROM "Episodes"'
    else:  # MySQL or MariaDB
        select_episodes = "SELECT EpisodeID, PodcastID, EpisodeTitle, EpisodeURL, EpisodePubDate FROM Episodes"
    cursor.execute(select_episodes)
    episodes = cursor.fetchall()

    # iterate through all episodes
    # NOTE(review): assumes a tuple-style cursor; dict rows would fail to unpack.
    for episode in episodes:
        episode_id, podcast_id, episode_title, episode_url, published_date = episode

        try:
            # check if episode URL is still valid; a bounded timeout keeps a
            # dead host from stalling the whole sweep indefinitely
            # (previously no timeout — requests would wait forever)
            response = requests.head(episode_url, timeout=10)
            if response.status_code == 404:
                # remove episode from database
                if database_type == "postgresql":
                    delete_episode = 'DELETE FROM "Episodes" WHERE "EpisodeID"=%s'
                else:  # MySQL or MariaDB
                    delete_episode = "DELETE FROM Episodes WHERE EpisodeID=%s"
                cursor.execute(delete_episode, (episode_id,))
                cnx.commit()

        except Exception as e:
            # Best-effort sweep: log and move on to the next episode.
            print(f"Error checking episode {episode_id}: {e}")

    cursor.close()
-
-
def get_podcast_id_by_title(cnx, database_type, podcast_title):
    """Look up a podcast's ID by its title.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_title: Exact title to match against Podcasts.Title.

    Returns:
        The PodcastID of the first matching row, or None when no podcast
        with that title exists.
    """
    cursor = cnx.cursor()
    try:
        # get the podcast ID for the specified title
        if database_type == "postgresql":
            cursor.execute('SELECT PodcastID FROM "Podcasts" WHERE Title = %s', (podcast_title,))
        else:  # MySQL or MariaDB
            cursor.execute("SELECT PodcastID FROM Podcasts WHERE Title = %s", (podcast_title,))
        result = cursor.fetchone()
    finally:
        # Previously the close() sat after the returns and never ran,
        # leaking the cursor; close it on every path now.
        cursor.close()

    if result:
        # Support both dict-style and tuple-style rows.
        if isinstance(result, dict):
            return result.get('podcastid')
        return result[0]
    return None
-
def get_podcast_feed_by_id(cnx, database_type, podcast_id):
    """Return the FeedURL for the given podcast, or None when not found.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: ID of the podcast to look up.

    Returns:
        The feed URL string, or None if the podcast does not exist.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            cursor.execute('SELECT FeedURL FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,))
        else:  # MySQL or MariaDB
            cursor.execute("SELECT FeedURL FROM Podcasts WHERE PodcastID = %s", (podcast_id,))
        result = cursor.fetchone()
    finally:
        # Previously dead code after the returns; now closed on every path.
        cursor.close()

    if result:
        # Handle different return types
        if isinstance(result, dict):
            return result.get('feedurl')  # dict rows use lowercase keys
        elif isinstance(result, (tuple, list)):
            return result[0]
        else:
            # For any other scalar row type, return it directly
            return result
    return None
-
def refresh_podcast_by_title(cnx, database_type, podcast_title):
    """Refresh the podcast whose title matches podcast_title, if one exists."""
    # Resolve the human-readable title to an internal podcast ID first.
    matched_id = get_podcast_id_by_title(cnx, database_type, podcast_title)

    # Guard clause: nothing to refresh when the title is unknown.
    if matched_id is None:
        print("Error: Could not find podcast with title {}".format(podcast_title))
        return

    # refresh the podcast with the specified ID
    refresh_single_pod(cnx, database_type, matched_id)
-
-
def refresh_single_pod(cnx, database_type, podcast_id):
    """Fetch one podcast's RSS feed and insert episodes not yet stored.

    Episodes are de-duplicated by title against the existing Episodes rows.
    Episode artwork falls back to the podcast's own artwork when the entry
    carries none.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: ID of the podcast to refresh.
    """
    cursor = cnx.cursor()

    # get the feed URL and artwork URL for the specified podcast
    if database_type == "postgresql":
        cursor.execute('SELECT FeedURL, ArtworkURL FROM "Podcasts" WHERE PodcastID = %s', (podcast_id,))
    else:  # MySQL or MariaDB
        cursor.execute("SELECT FeedURL, ArtworkURL FROM Podcasts WHERE PodcastID = %s", (podcast_id,))
    row = cursor.fetchone()
    if row is None:
        # Unknown podcast ID: previously this crashed unpacking None.
        cursor.close()
        return
    feed_url, artwork_url = row

    # parse the podcast feed
    episode_dump = feedparser.parse(feed_url)

    # get the list of episode titles already in the database
    if database_type == "postgresql":
        cursor.execute('SELECT EpisodeTitle FROM "Episodes" WHERE PodcastID = %s', (podcast_id,))
    else:  # MySQL or MariaDB
        cursor.execute("SELECT EpisodeTitle FROM Episodes WHERE PodcastID = %s", (podcast_id,))
    existing_titles = set(row[0] for row in cursor.fetchall())

    # insert any new episodes into the database
    for entry in episode_dump.entries:
        if hasattr(entry, "title") and hasattr(entry, "summary") and hasattr(entry, "enclosures"):
            title = entry.title

            # skip episodes that are already in the database
            if title in existing_titles:
                continue

            description = entry.summary
            audio_url = entry.enclosures[0].href if entry.enclosures else ""
            release_date = dateutil.parser.parse(entry.published).strftime("%Y-%m-%d")

            # Use a per-episode variable for the artwork fallback. Previously
            # this reassigned `artwork_url`, so one episode's artwork leaked
            # into the fallback for every subsequent episode.
            episode_artwork = entry.get('itunes_image', {}).get('href', None) or entry.get('image', {}).get('href',
                                                                                                            None) or artwork_url

            # insert the episode into the database
            if database_type == "postgresql":
                add_episode = ('INSERT INTO "Episodes" '
                            '(PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) '
                            'VALUES (%s, %s, %s, %s, %s, %s, %s)')
            else:  # MySQL or MariaDB
                add_episode = ("INSERT INTO Episodes "
                            "(PodcastID, EpisodeTitle, EpisodeDescription, EpisodeURL, EpisodeArtwork, EpisodePubDate, EpisodeDuration) "
                            "VALUES (%s, %s, %s, %s, %s, %s, %s)")
            episode_values = (podcast_id, title, description, audio_url, episode_artwork, release_date, 0)
            cursor.execute(add_episode, episode_values)

    cnx.commit()

    cursor.close()
-
-
def get_hist_value(result, key, default=None):
    """Extract `key` from a DB row that may be a dict or a tuple.

    Dict rows are looked up directly. Tuple rows use a fixed positional
    map — only "UserEpisodeHistoryID" (index 0) is known. Any other row
    type, or an unknown key on a tuple, yields `default`.
    """
    if isinstance(result, dict):
        return result.get(key, default)
    if isinstance(result, tuple):
        position = {"UserEpisodeHistoryID": 0}.get(key)
        if position is None:
            return default
        return result[position]
    # Unsupported row type (list, None, scalar, ...): fall back.
    return default
-
def record_podcast_history(cnx, database_type, episode_id, user_id, episode_pos, is_youtube=False):
    """Upsert a listen-history record for an episode or YouTube video.

    Rounds episode_pos to whole seconds and either updates the existing
    history row for (episode_id, user_id) or inserts a new one, stamped
    with the current local time.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        episode_id: Episodes.EpisodeID, or YouTubeVideos.VideoID when
            is_youtube is True.
        user_id: User whose history is being recorded.
        episode_pos: Playback position in seconds (may be fractional).
        is_youtube: True to record into UserVideoHistory instead of
            UserEpisodeHistory.
    """
    from datetime import datetime
    cursor = cnx.cursor()
    now = datetime.now()
    new_listen_duration = round(episode_pos)

    if is_youtube:
        # Handle YouTube video history
        if database_type == "postgresql":
            check_history = 'SELECT UserVideoHistoryID FROM "UserVideoHistory" WHERE VideoID = %s AND UserID = %s'
        else:
            check_history = "SELECT UserVideoHistoryID FROM UserVideoHistory WHERE VideoID = %s AND UserID = %s"

        cursor.execute(check_history, (episode_id, user_id))
        result = cursor.fetchone()

        if result is not None:
            # Update existing video history
            history_id = get_hist_value(result, "UserVideoHistoryID")
            # NOTE(review): get_hist_value only maps "UserEpisodeHistoryID"
            # for tuple rows, so tuple cursors fall through to the
            # default (None) here and no update runs — confirm dict rows
            # are guaranteed on this path.
            if history_id is not None:
                if database_type == "postgresql":
                    update_history = 'UPDATE "UserVideoHistory" SET ListenDuration = %s, ListenDate = %s WHERE UserVideoHistoryID = %s'
                else:
                    update_history = "UPDATE UserVideoHistory SET ListenDuration = %s, ListenDate = %s WHERE UserVideoHistoryID = %s"
                cursor.execute(update_history, (new_listen_duration, now, history_id))
        else:
            # Add new video history record
            if database_type == "postgresql":
                add_history = 'INSERT INTO "UserVideoHistory" (VideoID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)'
            else:
                add_history = "INSERT INTO UserVideoHistory (VideoID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)"
            cursor.execute(add_history, (episode_id, user_id, new_listen_duration, now))
    else:
        # Handle regular podcast episode history (existing logic)
        if database_type == "postgresql":
            check_history = 'SELECT UserEpisodeHistoryID FROM "UserEpisodeHistory" WHERE EpisodeID = %s AND UserID = %s'
        else:
            check_history = "SELECT UserEpisodeHistoryID FROM UserEpisodeHistory WHERE EpisodeID = %s AND UserID = %s"

        cursor.execute(check_history, (episode_id, user_id))
        result = cursor.fetchone()

        if result is not None:
            history_id = get_hist_value(result, "UserEpisodeHistoryID")
            if history_id is not None:
                if database_type == "postgresql":
                    update_history = 'UPDATE "UserEpisodeHistory" SET ListenDuration = %s, ListenDate = %s WHERE UserEpisodeHistoryID = %s'
                else:
                    update_history = "UPDATE UserEpisodeHistory SET ListenDuration = %s, ListenDate = %s WHERE UserEpisodeHistoryID = %s"
                cursor.execute(update_history, (new_listen_duration, now, history_id))
        else:
            if database_type == "postgresql":
                add_history = 'INSERT INTO "UserEpisodeHistory" (EpisodeID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)'
            else:
                add_history = "INSERT INTO UserEpisodeHistory (EpisodeID, UserID, ListenDuration, ListenDate) VALUES (%s, %s, %s, %s)"
            cursor.execute(add_history, (episode_id, user_id, new_listen_duration, now))

    cnx.commit()
    cursor.close()
-
-
def get_user_id(cnx, database_type, username):
    """Return the UserID for `username`, or 1 when no row matches.

    NOTE(review): the fallback of 1 (rather than None) is preserved from
    the original implementation — confirm callers depend on it before
    changing.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = 'SELECT UserID FROM "Users" WHERE Username = %s'
    else:
        query = "SELECT UserID FROM Users WHERE Username = %s"
    cursor.execute(query, (username,))
    row = cursor.fetchone()
    cursor.close()

    # Tuple row → first column; missing user → legacy default of 1.
    return row[0] if row else 1
-
def get_existing_youtube_videos(cnx, database_type, podcast_id):
    """Get the set of existing YouTube video URLs for a podcast.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Podcast whose stored video URLs are fetched.

    Returns:
        A set of non-empty VideoURL strings (empty set when none exist).
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = '''
            SELECT VideoURL FROM "YouTubeVideos"
            WHERE PodcastID = %s
        '''
    else:
        query = '''
            SELECT VideoURL FROM YouTubeVideos
            WHERE PodcastID = %s
        '''

    cursor.execute(query, (podcast_id,))
    results = cursor.fetchall()
    cursor.close()

    existing_urls = set()
    if results:
        for result in results:
            # Reset per row: previously `url` stayed unbound when the first
            # row was neither dict nor tuple (NameError), or kept the prior
            # row's value for unrecognized row types.
            url = None
            if isinstance(result, dict):
                url = result.get("videourl")
            elif isinstance(result, tuple):
                url = result[0]
            if url:
                existing_urls.add(url)

    return existing_urls
-
def get_user_id_from_pod_id(cnx, database_type, podcast_id):
    """Return the owning UserID for a podcast, or None when unknown."""
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = 'SELECT UserID FROM "Podcasts" WHERE PodcastID = %s'
    else:
        query = "SELECT UserID FROM Podcasts WHERE PodcastID = %s"

    cursor.execute(query, (podcast_id,))
    row = cursor.fetchone()

    # Rows may arrive as dicts (dict cursors, lowercase keys) or tuples;
    # any other row type — or no row at all — maps to None.
    owner = None
    if row:
        if isinstance(row, dict):
            owner = row.get("userid")
        elif isinstance(row, tuple):
            owner = row[0]

    cursor.close()
    return owner
-
-
def get_user_details(cnx, database_type, username):
    """Return a user's core fields keyed by name, or None if not found.

    Now mirrors get_user_details_id: handles both dict-style and
    tuple-style rows (previously tuple-only, so dict cursors raised).
    Tuple rows assume the SELECT * column order
    (UserID, Fullname, Username, Email, Hashed_PW) — confirm against the
    Users schema.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        username: Username to look up.

    Returns:
        Dict with keys UserID, Fullname, Username, Email, Hashed_PW,
        or None when the user does not exist.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = 'SELECT * FROM "Users" WHERE Username = %s'
    else:
        query = "SELECT * FROM Users WHERE Username = %s"
    cursor.execute(query, (username,))
    result = cursor.fetchone()
    cursor.close()

    if not result:
        return None

    if isinstance(result, dict):
        # Dict rows (e.g. psycopg dict_row) use lowercase column keys.
        return {
            'UserID': result['userid'],
            'Fullname': result['fullname'],
            'Username': result['username'],
            'Email': result['email'],
            'Hashed_PW': result['hashed_pw']
        }
    return {
        'UserID': result[0],
        'Fullname': result[1],
        'Username': result[2],
        'Email': result[3],
        'Hashed_PW': result[4]
    }
-
-
def get_user_details_id(cnx, database_type, user_id):
    """Return a user's core fields keyed by name, or None if not found.

    Handles both dict-style rows (lowercase keys) and tuple-style rows
    (positional, in SELECT * column order).
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = 'SELECT * FROM "Users" WHERE UserID = %s'
    else:
        query = "SELECT * FROM Users WHERE UserID = %s"
    cursor.execute(query, (user_id,))
    row = cursor.fetchone()
    cursor.close()

    # No row (or an empty/falsy one) → user not found.
    if not row:
        return None

    if isinstance(row, dict):
        return {
            'UserID': row['userid'],
            'Fullname': row['fullname'],
            'Username': row['username'],
            'Email': row['email'],
            'Hashed_PW': row['hashed_pw']
        }
    if isinstance(row, tuple):
        return {
            'UserID': row[0],
            'Fullname': row[1],
            'Username': row[2],
            'Email': row[3],
            'Hashed_PW': row[4]
        }
    # Unrecognized row type: mirror the original's implicit None.
    return None
-
-
def user_history(cnx, database_type, user_id):
    """Return the user's combined listen history (podcasts + YouTube).

    Unions UserEpisodeHistory (joined to Episodes/Podcasts) with started
    YouTube videos (ListenPosition > 0) from podcasts the user owns, and
    normalizes every row to a dict with lowercase keys. Rows are ordered
    by listen date, newest first; YouTube rows have a NULL listen date.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_id: User whose history is returned.

    Returns:
        List of dicts (lowercase keys, booleans for `completed` and
        `is_youtube`); [] when the connection is missing or no rows.

    Raises:
        Re-raises any database error after logging it.
    """
    if not cnx:
        logging.error("Database connection is None.")
        return []
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT * FROM (
                    SELECT
                        "Episodes".EpisodeID as episodeid,
                        "UserEpisodeHistory".ListenDate as listendate,
                        "UserEpisodeHistory".ListenDuration as listenduration,
                        "Episodes".EpisodeTitle as episodetitle,
                        "Episodes".EpisodeDescription as episodedescription,
                        "Episodes".EpisodeArtwork as episodeartwork,
                        "Episodes".EpisodeURL as episodeurl,
                        "Episodes".EpisodeDuration as episodeduration,
                        "Podcasts".PodcastName as podcastname,
                        "Episodes".EpisodePubDate as episodepubdate,
                        "Episodes".Completed as completed,
                        FALSE as is_youtube
                    FROM "UserEpisodeHistory"
                    JOIN "Episodes" ON "UserEpisodeHistory".EpisodeID = "Episodes".EpisodeID
                    JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                    WHERE "UserEpisodeHistory".UserID = %s

                    UNION ALL

                    SELECT
                        "YouTubeVideos".VideoID as episodeid,
                        NULL as listendate,  -- YouTube doesn't track listen date currently
                        "YouTubeVideos".ListenPosition as listenduration,
                        "YouTubeVideos".VideoTitle as episodetitle,
                        "YouTubeVideos".VideoDescription as episodedescription,
                        "YouTubeVideos".ThumbnailURL as episodeartwork,
                        "YouTubeVideos".VideoURL as episodeurl,
                        "YouTubeVideos".Duration as episodeduration,
                        "Podcasts".PodcastName as podcastname,
                        "YouTubeVideos".PublishedAt as episodepubdate,
                        "YouTubeVideos".Completed as completed,
                        TRUE as is_youtube
                    FROM "YouTubeVideos"
                    JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                    WHERE "YouTubeVideos".ListenPosition > 0
                    AND "Podcasts".UserID = %s
                ) combined
                ORDER BY listendate DESC NULLS LAST
            """
        else:  # MySQL/MariaDB
            # NOTE(review): this replaces the cursor created above without
            # closing the first one — harmless for most drivers but worth
            # tidying.
            cursor = cnx.cursor(dictionary=True)
            query = """
                SELECT * FROM (
                    SELECT
                        Episodes.EpisodeID as episodeid,
                        UserEpisodeHistory.ListenDate as listendate,
                        UserEpisodeHistory.ListenDuration as listenduration,
                        Episodes.EpisodeTitle as episodetitle,
                        Episodes.EpisodeDescription as episodedescription,
                        Episodes.EpisodeArtwork as episodeartwork,
                        Episodes.EpisodeURL as episodeurl,
                        Episodes.EpisodeDuration as episodeduration,
                        Podcasts.PodcastName as podcastname,
                        Episodes.EpisodePubDate as episodepubdate,
                        Episodes.Completed as completed,
                        FALSE as is_youtube
                    FROM UserEpisodeHistory
                    JOIN Episodes ON UserEpisodeHistory.EpisodeID = Episodes.EpisodeID
                    JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                    WHERE UserEpisodeHistory.UserID = %s

                    UNION ALL

                    SELECT
                        YouTubeVideos.VideoID as episodeid,
                        NULL as listendate,
                        YouTubeVideos.ListenPosition as listenduration,
                        YouTubeVideos.VideoTitle as episodetitle,
                        YouTubeVideos.VideoDescription as episodedescription,
                        YouTubeVideos.ThumbnailURL as episodeartwork,
                        YouTubeVideos.VideoURL as episodeurl,
                        YouTubeVideos.Duration as episodeduration,
                        Podcasts.PodcastName as podcastname,
                        YouTubeVideos.PublishedAt as episodepubdate,
                        YouTubeVideos.Completed as completed,
                        TRUE as is_youtube
                    FROM YouTubeVideos
                    JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                    WHERE YouTubeVideos.ListenPosition > 0
                    AND Podcasts.UserID = %s
                ) combined
                ORDER BY listendate DESC
            """

        cursor.execute(query, (user_id, user_id))
        results = cursor.fetchall()
        if not results:
            logging.info("No results found for user history.")
            return []

        # Get column descriptions (used to name fields of tuple rows).
        columns = [col[0].lower() for col in cursor.description]

        # Convert results to list of dictionaries
        history_episodes = []
        for row in results:
            episode = {}
            if isinstance(row, tuple):
                for idx, column_name in enumerate(columns):
                    value = row[idx]
                    # Drivers return 0/1 for these; normalize to real booleans.
                    if column_name in ['completed', 'is_youtube']:
                        value = bool(value)
                    episode[column_name] = value
            elif isinstance(row, dict):
                for k, v in row.items():
                    column_name = k.lower()
                    value = v
                    if column_name in ['completed', 'is_youtube']:
                        value = bool(value)
                    episode[column_name] = value
            else:
                # Unknown row shape: an empty dict is still appended below.
                logging.error(f"Unexpected row type: {type(row)}")
            history_episodes.append(episode)

        return lowercase_keys(history_episodes)

    except Exception as e:
        logging.error(f"Error executing user_history query: {e}")
        raise
    finally:
        cursor.close()
-
-def download_podcast(cnx, database_type, episode_id, user_id, task_id=None, progress_callback=None):
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)
- print('download podcast is running')
- """
- Download a podcast episode with progress tracking.
-
- Args:
- cnx: Database connection
- database_type: Type of database (postgresql or mysql)
- episode_id: ID of the episode to download
- user_id: ID of the user requesting the download
- task_id: Optional Celery task ID for progress tracking
- progress_callback: Optional callback function to report progress (fn(progress, status))
-
- Returns:
- bool: True if successful, False otherwise
- """
- cursor = None
- temp_file = None
-
- try:
- # Import task-specific modules inside function to avoid circular imports
- if task_id:
- from database_functions.tasks import download_manager
-
- cursor = cnx.cursor()
-
- # First, check if already downloaded to avoid duplicate work
- if database_type == "postgresql":
- query = 'SELECT 1 FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s'
- else:
- query = "SELECT 1 FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s"
-
- cursor.execute(query, (episode_id, user_id))
- if cursor.fetchone():
- logger.info(f"Episode {episode_id} already downloaded for user {user_id}")
- # Update task progress to 100% if task_id is provided
- if task_id:
- download_manager.update_task(task_id, 100.0, "SUCCESS")
- if progress_callback:
- progress_callback(100.0, "SUCCESS")
- return True
-
- # Get episode details
- if database_type == "postgresql":
- query = '''
- SELECT
- e.EpisodeID,
- e.PodcastID,
- e.EpisodeTitle,
- e.EpisodePubDate,
- e.EpisodeURL,
- e.EpisodeDescription,
- e.EpisodeArtwork,
- p.PodcastName,
- p.Author,
- p.ArtworkURL
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
- WHERE e.EpisodeID = %s
- '''
- else:
- query = '''
- SELECT
- e.EpisodeID,
- e.PodcastID,
- e.EpisodeTitle,
- e.EpisodePubDate,
- e.EpisodeURL,
- e.EpisodeDescription,
- e.EpisodeArtwork,
- p.PodcastName,
- p.Author,
- p.ArtworkURL
- FROM Episodes e
- JOIN Podcasts p ON e.PodcastID = p.PodcastID
- WHERE e.EpisodeID = %s
- '''
-
- cursor.execute(query, (episode_id,))
- result = cursor.fetchone()
-
- if result is None:
- logger.error(f"Episode {episode_id} not found")
- if task_id:
- download_manager.update_task(task_id, 0.0, "FAILED")
- if progress_callback:
- progress_callback(0.0, "FAILED")
- return False
-
- # Extract episode details
- if isinstance(result, dict):
- episode_url = result.get('episodeurl') or result.get('EpisodeURL')
- podcast_name = result.get('podcastname') or result.get('PodcastName')
- episode_title = result.get('episodetitle') or result.get('EpisodeTitle')
- pub_date = result.get('episodepubdate') or result.get('EpisodePubDate')
- author = result.get('author') or result.get('Author')
- episode_artwork = result.get('episodeartwork') or result.get('EpisodeArtwork')
- artwork_url = result.get('artworkurl') or result.get('ArtworkURL')
- else:
- # Match positions from SELECT query
- episode_url = result[4] # EpisodeURL
- podcast_name = result[7] # PodcastName
- episode_title = result[2] # EpisodeTitle
- pub_date = result[3] # EpisodePubDate
- author = result[8] # Author
- episode_artwork = result[6] # EpisodeArtwork
- artwork_url = result[9] # ArtworkURL
-
- # Update task progress if task_id is provided
- if task_id:
- download_manager.update_task(task_id, 5.0, "STARTED")
- if progress_callback:
- progress_callback(5.0, "STARTED")
-
- # Get user's time and date preferences
- timezone, time_format, date_format = get_time_info(database_type, cnx, user_id)
-
- # Use default format if user preferences aren't set
- if not date_format:
- date_format = "ISO"
-
- # Format the publication date based on user preference
- date_format_map = {
- "ISO": "%Y-%m-%d",
- "USA": "%m/%d/%Y",
- "EUR": "%d.%m.%Y",
- "JIS": "%Y-%m-%d",
- "MDY": "%m-%d-%Y",
- "DMY": "%d-%m-%Y",
- "YMD": "%Y-%m-%d",
- }
-
- date_format_str = date_format_map.get(date_format, "%Y-%m-%d")
- filename_date_format_str = date_format_str.replace('/', '-').replace('\\', '-')
- pub_date_str = pub_date.strftime(filename_date_format_str)
-
-
- # Clean filenames of invalid characters
- podcast_name = "".join(c for c in podcast_name if c.isalnum() or c in (' ', '-', '_')).strip()
- episode_title = "".join(c for c in episode_title if c.isalnum() or c in (' ', '-', '_')).strip()
-
- # Create the download directory
- download_dir = os.path.join("/opt/pinepods/downloads", podcast_name)
- os.makedirs(download_dir, exist_ok=True)
- uid = int(os.environ.get('PUID', 1000))
- gid = int(os.environ.get('PGID', 1000))
- os.chown(download_dir, uid, gid)
-
- # Generate filename with enhanced details
- filename = f"{pub_date_str}_{episode_title}_{user_id}-{episode_id}.mp3"
- file_path = os.path.join(download_dir, filename)
-
- # Check if file already exists
- if os.path.exists(file_path):
- # File exists but not in database, add the database entry
- downloaded_date = datetime.datetime.fromtimestamp(os.path.getctime(file_path))
- file_size = os.path.getsize(file_path)
-
- if database_type == "postgresql":
- query = '''
- INSERT INTO "DownloadedEpisodes"
- (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation)
- VALUES (%s, %s, %s, %s, %s)
- '''
- else:
- query = '''
- INSERT INTO DownloadedEpisodes
- (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation)
- VALUES (%s, %s, %s, %s, %s)
- '''
-
- cursor.execute(query, (user_id, episode_id, downloaded_date, file_size, file_path))
- cnx.commit()
-
- if task_id:
- download_manager.update_task(task_id, 100.0, "SUCCESS")
- if progress_callback:
- progress_callback(100.0, "SUCCESS")
-
- logger.info(f"File already exists, added to database: {file_path}")
- return True
-
- # Create a temporary file for download
- temp_file = tempfile.NamedTemporaryFile(delete=False, suffix='.mp3')
- temp_path = temp_file.name
- temp_file.close()
-
- if task_id:
- download_manager.update_task(task_id, 10.0, "DOWNLOADING")
- if progress_callback:
- progress_callback(10.0, "DOWNLOADING")
-
- # Download the file with progress tracking
- logger.info(f"Starting download of episode {episode_id} from {episode_url}")
-
- try:
- headers = {
- 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
- 'Referer': 'https://www.buzzsprout.com/',
- 'Accept': '*/*',
- 'Accept-Language': 'en-US,en;q=0.9',
- 'Accept-Encoding': 'gzip, deflate, br'
- }
-
- with requests.get(episode_url, stream=True, headers=headers) as response:
- response.raise_for_status()
- downloaded_date = datetime.datetime.now()
- file_size = int(response.headers.get("Content-Length", 0))
-
- # Stream the download to temporary file with progress tracking
- downloaded_bytes = 0
- with open(temp_path, "wb") as f:
- for chunk in response.iter_content(chunk_size=8192):
- if chunk:
- f.write(chunk)
- downloaded_bytes += len(chunk)
-
- # Update progress every ~5% if file size is known
- if file_size > 0:
- progress = (downloaded_bytes / file_size) * 100
- # Only update at certain intervals to reduce overhead
- if downloaded_bytes % (file_size // 20 + 1) < 8192: # ~5% intervals
- download_progress = 10.0 + (progress * 0.8) # Scale to 10-90%
- if task_id:
- download_manager.update_task(task_id, download_progress, "DOWNLOADING")
- if progress_callback:
- progress_callback(download_progress, "DOWNLOADING")
- except Exception as e:
- logger.error(f"Failed to download episode {episode_id}: {str(e)}")
- if task_id:
- download_manager.update_task(task_id, 0.0, "FAILED")
- if progress_callback:
- progress_callback(0.0, "FAILED")
-
- # Clean up temp file
- if os.path.exists(temp_path):
- os.unlink(temp_path)
-
- raise
-
- if task_id:
- download_manager.update_task(task_id, 90.0, "FINALIZING")
- if progress_callback:
- progress_callback(90.0, "FINALIZING")
-
- print(f"DEBUG - Moving temp file from: {temp_path}")
- print(f"DEBUG - Moving to destination: {file_path}")
- print(f"DEBUG - Directory exists check: {os.path.exists(os.path.dirname(file_path))}")
- print(f"DEBUG - Date format being used: {date_format} -> {date_format_str}")
- print(f"DEBUG - Formatted date: {pub_date_str}")
-
- # Move the temporary file to the final location
- shutil.move(temp_path, file_path)
-
- # Set permissions
- os.chown(file_path, uid, gid)
-
- # Add metadata to the file
- metadata = {
- 'title': episode_title,
- 'artist': author,
- 'album': podcast_name,
- 'date': pub_date_str,
- 'artwork_url': episode_artwork or artwork_url
- }
-
- try:
- from database_functions import mp3_metadata
- mp3_metadata.add_podcast_metadata(file_path, metadata)
- except Exception as e:
- logger.warning(f"Failed to add metadata to {file_path}: {e}")
-
- # Update database
- if database_type == "postgresql":
- query = '''
- INSERT INTO "DownloadedEpisodes"
- (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation)
- VALUES (%s, %s, %s, %s, %s)
- '''
- else:
- query = '''
- INSERT INTO DownloadedEpisodes
- (UserID, EpisodeID, DownloadedDate, DownloadedSize, DownloadedLocation)
- VALUES (%s, %s, %s, %s, %s)
- '''
-
- cursor.execute(query, (user_id, episode_id, downloaded_date, file_size, file_path))
-
- # Update download count
- if database_type == "postgresql":
- query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s'
- else:
- query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- cnx.commit()
-
- if task_id:
- download_manager.update_task(task_id, 100.0, "SUCCESS")
- if progress_callback:
- progress_callback(100.0, "SUCCESS")
-
- logger.info(f"Successfully downloaded episode {episode_id} to {file_path}")
- return True
-
- except requests.RequestException as e:
- logger.error(f"Network error downloading episode {episode_id}: {e}")
- if cursor:
- cnx.rollback()
- if task_id:
- download_manager.update_task(task_id, 0.0, "FAILED")
- if progress_callback:
- progress_callback(0.0, "FAILED")
- return False
- except Exception as e:
- logger.error(f"Error downloading episode {episode_id}: {e}", exc_info=True)
- if cursor:
- cnx.rollback()
- if task_id:
- download_manager.update_task(task_id, 0.0, "FAILED")
- if progress_callback:
- progress_callback(0.0, "FAILED")
- return False
- finally:
- if cursor:
- cursor.close()
- # Clean up temporary file if it exists and wasn't moved
- if temp_file and os.path.exists(temp_file.name):
- try:
- os.unlink(temp_file.name)
- except:
- pass
-
def get_episode_ids_for_podcast(cnx, database_type, podcast_id):
    """
    Get episode IDs and titles for a podcast.

    Handles both PostgreSQL and MariaDB/MySQL return types:
    PostgreSQL uses lowercase column names, MariaDB uses uppercase, and
    rows may come back as tuples or dicts depending on the driver.

    Episodes with a missing/None title are repaired via two fallbacks:
    a direct re-query of the title, then a synthesized
    "<podcast> - Episode <n>" name, and finally "Episode #<id>".

    Returns:
        list[dict]: [{"id": <episode_id>, "title": <episode_title>}, ...]
    """
    cursor = cnx.cursor()
    print(f"Database type: {database_type}")

    if database_type == "postgresql":
        # In PostgreSQL, table names are capitalized but column names are lowercase
        query = 'SELECT "episodeid", "episodetitle" FROM "Episodes" WHERE "podcastid" = %s'
    else:  # MySQL or MariaDB
        query = "SELECT EpisodeID, EpisodeTitle FROM Episodes WHERE PodcastID = %s"

    cursor.execute(query, (podcast_id,))
    results = cursor.fetchall()
    print(f"Raw query results (first 3): {results[:3]}")

    episodes = []
    for row in results:
        # Handle different return types from different database drivers
        if isinstance(row, dict):
            # Dictionary return (sometimes from MariaDB)
            if "episodeid" in row:  # PostgreSQL lowercase keys
                episode_id = row["episodeid"]
                episode_title = row.get("episodetitle", "")
            else:  # MariaDB uppercase keys
                episode_id = row["EpisodeID"]
                episode_title = row.get("EpisodeTitle", "")
        else:
            # Tuple return (most common from PostgreSQL)
            episode_id = row[0]
            episode_title = row[1] if len(row) > 1 else ""

        # Check for None, empty string, or 'None' string
        if not episode_title or episode_title == 'None':
            # Fallback 1: get a real episode title from the database if possible
            # (re-uses the same cursor; results were already fully fetched above)
            title_query = (
                'SELECT "episodetitle" FROM "Episodes" WHERE "episodeid" = %s'
                if database_type == "postgresql"
                else "SELECT EpisodeTitle FROM Episodes WHERE EpisodeID = %s"
            )
            cursor.execute(title_query, (episode_id,))
            title_result = cursor.fetchone()

            if title_result and title_result[0]:
                episode_title = title_result[0]
            else:
                # Fallback 2: synthesize a title from podcast name + ordinal.
                # COUNT(*) over episodes with id <= this one approximates the
                # episode's position in the feed (assumes ids increase with age).
                ordinal_query = (
                    'SELECT p."podcastname", COUNT(*) as episode_num FROM "Episodes" e '
                    'JOIN "Podcasts" p ON e."podcastid" = p."podcastid" '
                    'WHERE p."podcastid" = %s AND e."episodeid" <= %s '
                    'GROUP BY p."podcastname"'
                    if database_type == "postgresql"
                    else "SELECT p.PodcastName, COUNT(*) as episode_num FROM Episodes e "
                    "JOIN Podcasts p ON e.PodcastID = p.PodcastID "
                    "WHERE p.PodcastID = %s AND e.EpisodeID <= %s "
                    "GROUP BY p.PodcastName"
                )
                cursor.execute(ordinal_query, (podcast_id, episode_id))
                ordinal_result = cursor.fetchone()

                if ordinal_result and len(ordinal_result) >= 2:
                    podcast_name = ordinal_result[0]
                    episode_num = ordinal_result[1]
                    episode_title = f"{podcast_name} - Episode {episode_num}"
                else:
                    # Last resort fallback
                    episode_title = f"Episode #{episode_id}"

        episodes.append({"id": episode_id, "title": episode_title})

    print(f"Processed episodes (first 3): {episodes[:3]}")
    cursor.close()
    return episodes
-
def get_video_ids_for_podcast(cnx, database_type, podcast_id):
    """
    Return all VideoIDs for a podcast, newest first.

    Args:
        cnx: Database connection.
        database_type: "postgresql" or MySQL/MariaDB.
        podcast_id: PodcastID to look up.

    Returns:
        list: VideoID values ordered by PublishedAt descending.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT VideoID
                FROM "YouTubeVideos"
                WHERE PodcastID = %s
                ORDER BY PublishedAt DESC
            """
        else:
            query = """
                SELECT VideoID
                FROM YouTubeVideos
                WHERE PodcastID = %s
                ORDER BY PublishedAt DESC
            """

        cursor.execute(query, (podcast_id,))
        results = cursor.fetchall()

        # Extract the video IDs, handling tuple and dict rows.
        # FIX: dict rows previously only handled the lowercase 'videoid' key;
        # MariaDB dict cursors return mixed-case keys ('VideoID'), which would
        # raise KeyError. Handle both casings like the sibling helpers do.
        video_ids = []
        for row in results:
            if isinstance(row, dict):
                video_ids.append(row.get('videoid') if 'videoid' in row else row.get('VideoID'))
            else:
                video_ids.append(row[0])
        return video_ids

    finally:
        cursor.close()
-
def get_podcast_index_id(cnx, database_type, podcast_id):
    """
    Look up the PodcastIndexID for a podcast.

    Returns:
        The PodcastIndexID value, or None when the podcast is not found.
    """
    cursor = cnx.cursor()
    try:
        query = (
            'SELECT PodcastIndexID FROM "Podcasts" WHERE PodcastID = %s'
            if database_type == "postgresql"
            else "SELECT PodcastIndexID FROM Podcasts WHERE PodcastID = %s"
        )
        cursor.execute(query, (podcast_id,))
        row = cursor.fetchone()
        if not row:
            return None
        # Rows may be tuples or dicts depending on the driver.
        return row[0] if isinstance(row, tuple) else row.get("podcastindexid")
    finally:
        cursor.close()
-
-
def download_youtube_video(cnx, database_type, video_id, user_id, task_id=None, progress_callback=None):
    """
    Download a YouTube video with progress tracking.

    The audio is assumed to have already been fetched into
    /opt/pinepods/downloads/youtube/<youtube_id>.mp3 by the YouTube
    downloader; this function copies it into the per-channel downloads
    directory, tags it, and records the download in the database.

    Args:
        cnx: Database connection
        database_type: Type of database (postgresql or mysql)
        video_id: ID of the video to download
        user_id: ID of the user requesting the download
        task_id: Optional Celery task ID for progress tracking
        progress_callback: Optional callback function to report progress (fn(progress, status))

    Returns:
        bool: True if successful, False otherwise
    """
    cursor = None

    try:
        # Import task-specific modules inside function to avoid circular imports
        if task_id:
            from database_functions.tasks import download_manager

        cursor = cnx.cursor()

        # Check if already downloaded; treat an existing record as success.
        if database_type == "postgresql":
            query = 'SELECT 1 FROM "DownloadedVideos" WHERE VideoID = %s AND UserID = %s'
        else:
            query = "SELECT 1 FROM DownloadedVideos WHERE VideoID = %s AND UserID = %s"

        cursor.execute(query, (video_id, user_id))
        if cursor.fetchone():
            # Update task progress to 100% if task_id is provided
            if task_id:
                download_manager.update_task(task_id, 100.0, "SUCCESS")
            if progress_callback:
                progress_callback(100.0, "SUCCESS")
            return True

        # Update progress if task_id is provided
        if task_id:
            download_manager.update_task(task_id, 5.0, "STARTED")
        if progress_callback:
            progress_callback(5.0, "STARTED")

        # Get video details joined with the owning podcast (for channel/author)
        if database_type == "postgresql":
            query = '''
                SELECT
                    v.VideoID,
                    v.PodcastID,
                    v.VideoTitle,
                    v.PublishedAt,
                    v.VideoURL,
                    v.VideoDescription,
                    v.ThumbnailURL,
                    v.YouTubeVideoID,
                    p.PodcastName,
                    p.Author
                FROM "YouTubeVideos" v
                JOIN "Podcasts" p ON v.PodcastID = p.PodcastID
                WHERE v.VideoID = %s
            '''
        else:
            query = '''
                SELECT
                    v.VideoID,
                    v.PodcastID,
                    v.VideoTitle,
                    v.PublishedAt,
                    v.VideoURL,
                    v.VideoDescription,
                    v.ThumbnailURL,
                    v.YouTubeVideoID,
                    p.PodcastName,
                    p.Author
                FROM YouTubeVideos v
                JOIN Podcasts p ON v.PodcastID = p.PodcastID
                WHERE v.VideoID = %s
            '''

        cursor.execute(query, (video_id,))
        result = cursor.fetchone()

        if result is None:
            if task_id:
                download_manager.update_task(task_id, 0.0, "FAILED")
            if progress_callback:
                progress_callback(0.0, "FAILED")
            return False

        # Extract values — rows may be dicts (either key casing) or tuples
        if isinstance(result, dict):
            youtube_video_id = result.get('youtubevideoid') or result.get('YouTubeVideoID')
            video_title = result.get('videotitle') or result.get('VideoTitle')
            pub_date = result.get('publishedat') or result.get('PublishedAt')
            channel_name = result.get('podcastname') or result.get('PodcastName')
            author = result.get('author') or result.get('Author')
        else:
            youtube_video_id = result[7]  # YouTubeVideoID
            video_title = result[2]       # VideoTitle
            pub_date = result[3]          # PublishedAt
            channel_name = result[8]      # PodcastName
            author = result[9]            # Author

        if task_id:
            download_manager.update_task(task_id, 10.0, "PROCESSING")
        if progress_callback:
            progress_callback(10.0, "PROCESSING")

        # Get user's time/date preferences and format the publish date for the filename
        timezone, time_format, date_format = get_time_info(database_type, cnx, user_id)
        date_format = date_format or "ISO"
        date_format_map = {
            "ISO": "%Y-%m-%d",
            "USA": "%m/%d/%Y",
            "EUR": "%d.%m.%Y",
            "JIS": "%Y-%m-%d",
            "MDY": "%m-%d-%Y",
            "DMY": "%d-%m-%Y",
            "YMD": "%Y-%m-%d",
        }
        date_format_str = date_format_map.get(date_format, "%Y-%m-%d")
        # Slashes/backslashes would create bogus path components in the filename
        filename_date_format_str = date_format_str.replace('/', '-').replace('\\', '-')
        pub_date_str = pub_date.strftime(filename_date_format_str)

        # Clean filenames: keep only alphanumerics, spaces, dashes, underscores
        channel_name = "".join(c for c in channel_name if c.isalnum() or c in (' ', '-', '_')).strip()
        video_title = "".join(c for c in video_title if c.isalnum() or c in (' ', '-', '_')).strip()

        # Source path: the yt downloader sometimes writes a double ".mp3.mp3" extension
        source_path = f"/opt/pinepods/downloads/youtube/{youtube_video_id}.mp3"
        if not os.path.exists(source_path):
            source_path = f"{source_path}.mp3"  # Try with double extension
            if not os.path.exists(source_path):
                if task_id:
                    download_manager.update_task(task_id, 0.0, "FAILED")
                if progress_callback:
                    progress_callback(0.0, "FAILED")
                return False

        if task_id:
            download_manager.update_task(task_id, 30.0, "PREPARING_DESTINATION")
        if progress_callback:
            progress_callback(30.0, "PREPARING_DESTINATION")

        # Create destination directory
        download_dir = os.path.join("/opt/pinepods/downloads", channel_name)
        os.makedirs(download_dir, exist_ok=True)

        # Set proper file permissions (PUID/PGID env vars, defaulting to 1000)
        uid = int(os.environ.get('PUID', 1000))
        gid = int(os.environ.get('PGID', 1000))
        os.chown(download_dir, uid, gid)

        # Generate destination filename
        filename = f"{pub_date_str}_{video_title}_{user_id}-{video_id}.mp3"
        dest_path = os.path.join(download_dir, filename)

        if task_id:
            download_manager.update_task(task_id, 50.0, "DOWNLOADING")
        if progress_callback:
            progress_callback(50.0, "DOWNLOADING")

        # Copy file with progress tracking
        try:
            # Get source file size for progress tracking
            source_size = os.path.getsize(source_path)

            # Use buffer-based copying to enable progress tracking
            with open(source_path, 'rb') as src_file, open(dest_path, 'wb') as dest_file:
                copied = 0
                buffer_size = 8192  # 8KB buffer

                while True:
                    buffer = src_file.read(buffer_size)
                    if not buffer:
                        break

                    dest_file.write(buffer)
                    copied += len(buffer)

                    if source_size > 0:
                        # Calculate progress (50-80% range for copying)
                        copy_progress = 50.0 + ((copied / source_size) * 30.0)

                        # Update progress every ~5% to reduce overhead
                        if copied % (source_size // 20 + 1) < buffer_size:
                            if task_id:
                                download_manager.update_task(task_id, copy_progress, "DOWNLOADING")
                            if progress_callback:
                                progress_callback(copy_progress, "DOWNLOADING")

        except Exception as e:
            if os.path.exists(dest_path):
                os.unlink(dest_path)  # Clean up incomplete file
            if task_id:
                download_manager.update_task(task_id, 0.0, "FAILED")
            if progress_callback:
                progress_callback(0.0, "FAILED")
            raise

        # Set proper permissions on destination file
        os.chown(dest_path, uid, gid)

        if task_id:
            download_manager.update_task(task_id, 80.0, "FINALIZING")
        if progress_callback:
            progress_callback(80.0, "FINALIZING")

        # Update ID3 metadata; failure here is non-fatal
        try:
            metadata = {
                'title': video_title,
                'artist': author,
                'album': channel_name,
                'date': pub_date_str
            }
            from database_functions import mp3_metadata
            mp3_metadata.add_podcast_metadata(dest_path, metadata)
        except Exception as e:
            print(f"Failed to add metadata to {dest_path}: {e}")
            # Continue despite metadata failure

        if task_id:
            download_manager.update_task(task_id, 90.0, "UPDATING_DATABASE")
        if progress_callback:
            progress_callback(90.0, "UPDATING_DATABASE")

        # Record in database
        file_size = os.path.getsize(dest_path)
        downloaded_date = datetime.datetime.now()

        if database_type == "postgresql":
            query = '''
                INSERT INTO "DownloadedVideos"
                (UserID, VideoID, DownloadedDate, DownloadedSize, DownloadedLocation)
                VALUES (%s, %s, %s, %s, %s)
            '''
        else:
            query = '''
                INSERT INTO DownloadedVideos
                (UserID, VideoID, DownloadedDate, DownloadedSize, DownloadedLocation)
                VALUES (%s, %s, %s, %s, %s)
            '''

        cursor.execute(query, (user_id, video_id, downloaded_date, file_size, dest_path))

        # Update download count
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s'
        else:
            query = "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded + 1 WHERE UserID = %s"

        cursor.execute(query, (user_id,))
        cnx.commit()

        if task_id:
            download_manager.update_task(task_id, 100.0, "SUCCESS")
        if progress_callback:
            progress_callback(100.0, "SUCCESS")

        print(f"Successfully downloaded YouTube video {video_id} to {dest_path}")
        return True

    except Exception as e:
        # BUG FIX: this previously called print(..., exc_info=True); print()
        # does not accept an exc_info kwarg, so the handler itself raised
        # TypeError and masked the original error. Use logging instead.
        logging.error(f"Error downloading YouTube video {video_id}: {str(e)}", exc_info=True)
        if cursor:
            cnx.rollback()
        if task_id:
            download_manager.update_task(task_id, 0.0, "FAILED")
        if progress_callback:
            progress_callback(0.0, "FAILED")
        return False
    finally:
        if cursor:
            cursor.close()
-
-
-
-
def get_podcast_id_from_episode(cnx, database_type, episode_id, user_id, is_youtube=False):
    """
    Resolve the PodcastID that owns an episode (or YouTube video).

    Tries the given user first; if no match, retries as the system user (1).
    Returns the PodcastID, or None when not found or on error.
    """
    cursor = cnx.cursor()
    try:
        # Pick the query by (database, content-kind) — SQL text must match
        # the schema's quoting convention for each backend.
        if database_type == "postgresql":
            if is_youtube:
                query = """
                    SELECT "YouTubeVideos".PodcastID
                    FROM "YouTubeVideos"
                    INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                    WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s
                """
            else:
                query = """
                    SELECT "Episodes".PodcastID
                    FROM "Episodes"
                    INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                    WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s
                """
        else:  # MySQL or MariaDB
            if is_youtube:
                query = """
                    SELECT YouTubeVideos.PodcastID
                    FROM YouTubeVideos
                    INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                    WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s
                """
            else:
                query = """
                    SELECT Episodes.PodcastID
                    FROM Episodes
                    INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                    WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s
                """

        # Attempt the lookup for the requesting user, then fall back to the
        # system user (id 1) so shared content can still be resolved.
        row = None
        for candidate_user in (user_id, 1):
            cursor.execute(query, (episode_id, candidate_user))
            row = cursor.fetchone()
            if row:
                break

        if not row:
            return None
        return row[0] if isinstance(row, tuple) else row.get("podcastid")
    except Exception as e:
        logging.error(f"Error in get_podcast_id_from_episode: {str(e)}")
        return None
    finally:
        cursor.close()
-
def get_podcast_id_from_episode_name(cnx, database_type, episode_name, episode_url, user_id):
    """
    Resolve the PodcastID owning an episode identified by title + URL.

    Searches regular episodes and YouTube videos in a single UNION query.
    Returns the PodcastID, or None when not found or on error.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
            SELECT podcast_id FROM (
                SELECT "Episodes".PodcastID as podcast_id
                FROM "Episodes"
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                WHERE "Episodes".EpisodeTitle = %s
                AND "Episodes".EpisodeURL = %s
                AND "Podcasts".UserID = %s

                UNION

                SELECT "YouTubeVideos".PodcastID as podcast_id
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                WHERE "YouTubeVideos".VideoTitle = %s
                AND "YouTubeVideos".VideoURL = %s
                AND "Podcasts".UserID = %s
            ) combined_results
            LIMIT 1
            """
        else:  # MySQL or MariaDB
            query = """
            SELECT podcast_id FROM (
                SELECT Episodes.PodcastID as podcast_id
                FROM Episodes
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                WHERE Episodes.EpisodeTitle = %s
                AND Episodes.EpisodeURL = %s
                AND Podcasts.UserID = %s

                UNION

                SELECT YouTubeVideos.PodcastID as podcast_id
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                WHERE YouTubeVideos.VideoTitle = %s
                AND YouTubeVideos.VideoURL = %s
                AND Podcasts.UserID = %s
            ) combined_results
            LIMIT 1
            """

        # Same three parameters feed both halves of the UNION.
        params = (episode_name, episode_url, user_id) * 2
        cursor.execute(query, params)

        row = cursor.fetchone()
        if not row:
            return None
        return row[0] if isinstance(row, tuple) else row.get("podcast_id")
    except Exception as e:
        logging.error(f"Error in get_podcast_id_from_episode_name: {str(e)}")
        return None
    finally:
        cursor.close()
-
-
def mark_episode_completed(cnx, database_type, episode_id, user_id, is_youtube=False):
    """
    Mark an episode (or YouTube video) as completed and upsert the user's
    listen history with the full duration.

    Note: if the stored duration is missing or zero, nothing is updated —
    the completion flag and history are only written when a truthy duration
    exists. Rolls back and re-raises on any database error.
    """
    cursor = cnx.cursor()
    try:
        if is_youtube:
            # Handle YouTube video
            if database_type == "postgresql":
                duration_query = 'SELECT Duration FROM "YouTubeVideos" WHERE VideoID = %s'
                update_query = 'UPDATE "YouTubeVideos" SET Completed = TRUE WHERE VideoID = %s'
                history_query = '''
                    INSERT INTO "UserVideoHistory" (UserID, VideoID, ListenDate, ListenDuration)
                    VALUES (%s, %s, NOW(), %s)
                    ON CONFLICT (UserID, VideoID)
                    DO UPDATE SET ListenDuration = %s, ListenDate = NOW()
                '''
            else:
                duration_query = "SELECT Duration FROM YouTubeVideos WHERE VideoID = %s"
                update_query = "UPDATE YouTubeVideos SET Completed = 1 WHERE VideoID = %s"
                history_query = '''
                    INSERT INTO UserVideoHistory (UserID, VideoID, ListenDate, ListenDuration)
                    VALUES (%s, %s, NOW(), %s)
                    ON DUPLICATE KEY UPDATE
                    ListenDuration = %s,
                    ListenDate = NOW()
                '''
        else:
            # Original episode logic
            if database_type == "postgresql":
                duration_query = 'SELECT EpisodeDuration FROM "Episodes" WHERE EpisodeID = %s'
                update_query = 'UPDATE "Episodes" SET Completed = TRUE WHERE EpisodeID = %s'
                history_query = '''
                    INSERT INTO "UserEpisodeHistory" (UserID, EpisodeID, ListenDate, ListenDuration)
                    VALUES (%s, %s, NOW(), %s)
                    ON CONFLICT (UserID, EpisodeID)
                    DO UPDATE SET ListenDuration = %s, ListenDate = NOW()
                '''
            else:
                duration_query = "SELECT EpisodeDuration FROM Episodes WHERE EpisodeID = %s"
                update_query = "UPDATE Episodes SET Completed = 1 WHERE EpisodeID = %s"
                history_query = '''
                    INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDate, ListenDuration)
                    VALUES (%s, %s, NOW(), %s)
                    ON DUPLICATE KEY UPDATE
                    ListenDuration = %s,
                    ListenDate = NOW()
                '''

        # Get duration — row may be a dict (driver-dependent key casing) or a tuple
        cursor.execute(duration_query, (episode_id,))
        duration_result = cursor.fetchone()
        if duration_result:
            if isinstance(duration_result, dict):
                duration = duration_result['episodeduration' if not is_youtube else 'duration']
            else:  # tuple
                duration = duration_result[0]
        else:
            duration = None

        if duration:
            # Update completion status
            cursor.execute(update_query, (episode_id,))

            # Update history — duration appears twice: once for the INSERT
            # values and once for the conflict/duplicate UPDATE clause
            history_params = (user_id, episode_id, duration, duration)
            cursor.execute(history_query, history_params)

            cnx.commit()
    except Exception as e:
        cnx.rollback()
        print(f"Error in mark_episode_completed: {str(e)}")
        raise e
    finally:
        cursor.close()
-
def mark_episode_uncompleted(cnx, database_type, episode_id, user_id, is_youtube=False):
    """
    Clear the completed flag on an episode (or YouTube video) and reset the
    user's listen-history duration to 0.

    Raises:
        Re-raises any database error after rolling back (added for
        consistency with mark_episode_completed, which rolls back on error).
    """
    cursor = cnx.cursor()
    try:
        if is_youtube:
            # Handle YouTube video
            if database_type == "postgresql":
                update_query = 'UPDATE "YouTubeVideos" SET Completed = FALSE WHERE VideoID = %s'
                history_query = '''
                    UPDATE "UserVideoHistory"
                    SET ListenDuration = 0, ListenDate = NOW()
                    WHERE UserID = %s AND VideoID = %s
                '''
            else:
                update_query = "UPDATE YouTubeVideos SET Completed = 0 WHERE VideoID = %s"
                history_query = '''
                    UPDATE UserVideoHistory
                    SET ListenDuration = 0, ListenDate = NOW()
                    WHERE UserID = %s AND VideoID = %s
                '''
        else:
            # Original episode logic
            if database_type == "postgresql":
                update_query = 'UPDATE "Episodes" SET Completed = FALSE WHERE EpisodeID = %s'
                history_query = '''
                    UPDATE "UserEpisodeHistory"
                    SET ListenDuration = 0, ListenDate = NOW()
                    WHERE UserID = %s AND EpisodeID = %s
                '''
            else:
                update_query = "UPDATE Episodes SET Completed = 0 WHERE EpisodeID = %s"
                history_query = '''
                    UPDATE UserEpisodeHistory
                    SET ListenDuration = 0, ListenDate = NOW()
                    WHERE UserID = %s AND EpisodeID = %s
                '''

        cursor.execute(update_query, (episode_id,))
        cursor.execute(history_query, (user_id, episode_id))
        cnx.commit()
    except Exception as e:
        # FIX: previously there was no rollback on failure, unlike the
        # sibling mark_episode_completed — a failed second statement could
        # leave the first uncommitted change dangling on the connection.
        cnx.rollback()
        raise e
    finally:
        cursor.close()
-
-
def enable_auto_download(cnx, database_type, podcast_id, user_id, auto_download):
    """Set the AutoDownload flag for a user's podcast; rolls back and re-raises on error."""
    cursor = cnx.cursor()
    try:
        sql = (
            'UPDATE "Podcasts" SET AutoDownload = %s WHERE PodcastID = %s AND UserID = %s'
            if database_type == "postgresql"
            else "UPDATE Podcasts SET AutoDownload = %s WHERE PodcastID = %s AND UserID = %s"
        )
        cursor.execute(sql, (auto_download, podcast_id, user_id))
        cnx.commit()
    except Exception as err:
        cnx.rollback()
        raise err
    finally:
        cursor.close()
-
def set_feed_cutoff(cnx, database_type, podcast_id, user_id, feed_cutoff):
    """Update FeedCutoffDays for a user's podcast; rolls back and re-raises on error."""
    cursor = cnx.cursor()
    try:
        sql = (
            'UPDATE "Podcasts" SET FeedCutoffDays = %s WHERE PodcastID = %s AND UserID = %s'
            if database_type == "postgresql"
            else "UPDATE Podcasts SET FeedCutoffDays = %s WHERE PodcastID = %s AND UserID = %s"
        )
        cursor.execute(sql, (feed_cutoff, podcast_id, user_id))
        cnx.commit()
    except Exception as err:
        cnx.rollback()
        raise err
    finally:
        cursor.close()
-
def call_get_auto_download_status(cnx, database_type, podcast_id, user_id):
    """Return the AutoDownload flag for a user's podcast, or None if not subscribed."""
    cursor = cnx.cursor()
    try:
        sql = (
            'SELECT AutoDownload FROM "Podcasts" WHERE PodcastID = %s AND UserID = %s'
            if database_type == "postgresql"
            else "SELECT AutoDownload FROM Podcasts WHERE PodcastID = %s AND UserID = %s"
        )
        cursor.execute(sql, (podcast_id, user_id))
        row = cursor.fetchone()
        if not row:
            return None
        # Row may be a tuple or a dict depending on the driver.
        return row[0] if isinstance(row, tuple) else row.get("autodownload")
    finally:
        cursor.close()
-
def set_playback_speed_podcast(cnx, database_type: str, podcast_id: int, playback_speed: float):
    """Set a per-podcast playback speed and flag it as user-customized."""
    cursor = cnx.cursor()
    try:
        sql = (
            'UPDATE "Podcasts" SET PlaybackSpeed = %s, PlaybackSpeedCustomized = TRUE WHERE PodcastID = %s'
            if database_type == "postgresql"
            else "UPDATE Podcasts SET PlaybackSpeed = %s, PlaybackSpeedCustomized = TRUE WHERE PodcastID = %s"
        )
        cursor.execute(sql, (playback_speed, podcast_id))
        cnx.commit()
    except Exception as err:
        cnx.rollback()
        raise err
    finally:
        cursor.close()
-
def set_playback_speed_user(cnx, database_type: str, user_id: int, playback_speed: float):
    """Set a user's default playback speed; rolls back and re-raises on error."""
    cursor = cnx.cursor()
    try:
        sql = (
            'UPDATE "Users" SET PlaybackSpeed = %s WHERE UserID = %s'
            if database_type == "postgresql"
            else "UPDATE Users SET PlaybackSpeed = %s WHERE UserID = %s"
        )
        cursor.execute(sql, (playback_speed, user_id))
        cnx.commit()
    except Exception as err:
        cnx.rollback()
        raise err
    finally:
        cursor.close()
-
def adjust_skip_times(cnx, database_type, podcast_id, start_skip, end_skip):
    """Update a podcast's intro/outro skip times (in seconds)."""
    cursor = cnx.cursor()
    try:
        sql = (
            'UPDATE "Podcasts" SET StartSkip = %s, EndSkip = %s WHERE PodcastID = %s'
            if database_type == "postgresql"
            else "UPDATE Podcasts SET StartSkip = %s, EndSkip = %s WHERE PodcastID = %s"
        )
        cursor.execute(sql, (start_skip, end_skip, podcast_id))
        cnx.commit()
    except Exception as err:
        cnx.rollback()
        raise err
    finally:
        cursor.close()
-
def get_auto_skip_times(cnx, database_type, podcast_id, user_id):
    """
    Return (start_skip, end_skip) for a user's podcast.

    Falls back to (0, 0) when the user has no subscription row.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT StartSkip, EndSkip
                FROM "Podcasts"
                WHERE PodcastID = %s AND UserID = %s
            """
        else:
            query = """
                SELECT StartSkip, EndSkip
                FROM Podcasts
                WHERE PodcastID = %s AND UserID = %s
            """

        cursor.execute(query, (podcast_id, user_id))
        row = cursor.fetchone()

        # Row may be a dict or a tuple depending on the driver.
        if isinstance(row, dict):
            return row.get("startskip"), row.get("endskip")
        if isinstance(row, tuple) and row:
            return row[0], row[1]

        # No result found (user isn't subscribed): default values
        return 0, 0
    finally:
        cursor.close()
-
def get_play_episode_details(cnx, database_type: str, user_id: int, podcast_id: int, is_youtube: bool = False):
    """
    Resolve the effective playback settings for playing an episode.

    The podcast-level speed only applies when the user has explicitly
    customized it (PlaybackSpeedCustomized); otherwise the user's default
    speed is used. Skip times default to 0 when unset or unsubscribed.

    Returns:
        tuple: (playback_speed, start_skip, end_skip)
    """
    # MariaDB needs dictionary=True for dict rows; PostgreSQL cursor config
    # is driver-level, so the plain cursor is used there.
    cursor = cnx.cursor(dictionary=True) if database_type != "postgresql" else cnx.cursor()
    try:
        # First get the user's default playback speed
        if database_type == "postgresql":
            user_query = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s'
        else:
            user_query = 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s'

        cursor.execute(user_query, (user_id,))
        user_result = cursor.fetchone()
        user_playback_speed = 1.0  # Default fallback

        if user_result:
            if isinstance(user_result, dict):
                # Case-insensitive lookup — drivers disagree on key casing
                for key in user_result:
                    if key.lower() == 'playbackspeed':
                        user_playback_speed = user_result[key] if user_result[key] is not None else 1.0
                        break
            else:  # Tuple
                user_playback_speed = user_result[0] if user_result[0] is not None else 1.0

        # Now get podcast-specific settings
        if database_type == "postgresql":
            podcast_query = '''
                SELECT PlaybackSpeed, PlaybackSpeedCustomized, StartSkip, EndSkip
                FROM "Podcasts"
                WHERE PodcastID = %s AND UserID = %s
            '''
        else:
            podcast_query = '''
                SELECT PlaybackSpeed, PlaybackSpeedCustomized, StartSkip, EndSkip
                FROM Podcasts
                WHERE PodcastID = %s AND UserID = %s
            '''

        cursor.execute(podcast_query, (podcast_id, user_id))
        podcast_result = cursor.fetchone()

        # Default values
        start_skip = 0
        end_skip = 0
        final_playback_speed = user_playback_speed  # Default to user's preference
        # FIX: initialize before parsing. Previously these were only bound
        # inside the parsing branches; a dict row missing the expected keys
        # would raise UnboundLocalError at the `if is_customized` check.
        is_customized = False
        podcast_speed = user_playback_speed

        if podcast_result:
            if isinstance(podcast_result, dict):
                # Case-insensitive lookup for dictionaries
                for key in podcast_result:
                    lower_key = key.lower()
                    if lower_key == 'startskip':
                        start_skip = podcast_result[key] if podcast_result[key] is not None else 0
                    elif lower_key == 'endskip':
                        end_skip = podcast_result[key] if podcast_result[key] is not None else 0
                    elif lower_key == 'playbackspeedcustomized':
                        is_customized = podcast_result[key]
                    elif lower_key == 'playbackspeed':
                        podcast_speed = podcast_result[key] if podcast_result[key] is not None else 1.0
            else:  # Tuple result
                start_skip = podcast_result[2] if podcast_result[2] is not None else 0
                end_skip = podcast_result[3] if podcast_result[3] is not None else 0
                is_customized = podcast_result[1]
                podcast_speed = podcast_result[0] if podcast_result[0] is not None else 1.0

            # Use podcast's playback speed only if it's been customized
            if is_customized:
                final_playback_speed = podcast_speed

        return final_playback_speed, start_skip, end_skip

    finally:
        cursor.close()
-
def clear_podcast_playback_speed(cnx, database_type: str, podcast_id: int, user_id: int) -> bool:
    """
    Reset a podcast's playback speed to 1.0 and clear the customized flag.

    Returns:
        bool: True on success, False when the update fails (error is printed,
        not raised).
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = '''
                UPDATE "Podcasts"
                SET PlaybackSpeed = 1.0, PlaybackSpeedCustomized = FALSE
                WHERE PodcastID = %s AND UserID = %s
            '''
        else:
            query = '''
                UPDATE Podcasts
                SET PlaybackSpeed = 1.0, PlaybackSpeedCustomized = FALSE
                WHERE PodcastID = %s AND UserID = %s
            '''
        cursor.execute(query, (podcast_id, user_id))
        cnx.commit()
        return True
    except Exception as e:
        print(f"Error clearing podcast playback speed: {e}")
        return False
    finally:
        cursor.close()
-
-
def check_downloaded(cnx, database_type, user_id, content_id, is_youtube=False):
    """
    Check whether a user has downloaded an episode or YouTube video.

    Args:
        cnx: Database connection.
        database_type: "postgresql" or MySQL/MariaDB.
        user_id: UserID to check for.
        content_id: EpisodeID or VideoID depending on is_youtube.
        is_youtube: When True, check DownloadedVideos instead of DownloadedEpisodes.

    Returns:
        bool: True when a download record exists.
    """
    cursor = cnx.cursor()
    # FIX: close the cursor in a finally block — previously an execute/fetch
    # failure leaked the cursor, unlike every sibling helper in this module.
    try:
        if is_youtube:
            if database_type == "postgresql":
                query = 'SELECT 1 FROM "DownloadedVideos" WHERE VideoID = %s AND UserID = %s'
            else:
                query = "SELECT 1 FROM DownloadedVideos WHERE VideoID = %s AND UserID = %s"
        else:
            if database_type == "postgresql":
                query = 'SELECT 1 FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s'
            else:
                query = "SELECT 1 FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s"

        cursor.execute(query, (content_id, user_id))
        return cursor.fetchone() is not None
    finally:
        cursor.close()
-
-
def get_download_value(result, key, default=None):
    """
    Extract a named value from a DB row that may be a dict or a tuple.

    Dict rows are read by lowercased key; tuple rows use a fixed
    column-position map (currently only 'downloadedlocation' -> index 0).
    Anything else yields the default.
    """
    lookup = key.lower()
    if isinstance(result, dict):
        return result.get(lookup, default)
    if isinstance(result, tuple):
        # Positional mapping for the queries this helper supports.
        positions = {"downloadedlocation": 0}
        idx = positions.get(lookup)
        return default if idx is None else result[idx]
    return default
-
def get_youtube_video_location(cnx, database_type, episode_id, user_id):
    """
    Locate the on-disk MP3 for a downloaded YouTube video.

    Looks up the YouTubeVideoID for the given internal video id (episode_id)
    and user, then probes /opt/pinepods/downloads/youtube/ for both
    "<id>.mp3" and the downloader's occasional double-extension
    "<id>.mp3.mp3".

    Returns:
        str | None: Path to the file, or None when the video is unknown,
        the file is missing, or any error occurs (errors are logged).
    """
    cursor = cnx.cursor()
    try:
        logging.info(f"Looking up YouTube video location for episode_id: {episode_id}, user_id: {user_id}")

        if database_type == "postgresql":
            query = '''
                SELECT "YouTubeVideos"."youtubevideoid"
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos"."podcastid" = "Podcasts"."podcastid"
                WHERE "YouTubeVideos"."videoid" = %s AND "Podcasts"."userid" = %s
            '''
        else:
            query = '''
                SELECT YouTubeVideos.YouTubeVideoID
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s
            '''

        cursor.execute(query, (episode_id, user_id))
        result = cursor.fetchone()

        logging.info(f"Query result: {result}")

        if result:
            # Handle both dict and tuple results
            youtube_id = result['youtubevideoid'] if isinstance(result, dict) else result[0]
            logging.info(f"Found YouTube ID: {youtube_id}")

            # Two candidate paths: normal and double ".mp3.mp3" extension
            file_path = os.path.join('/opt/pinepods/downloads/youtube', f'{youtube_id}.mp3')
            file_path_double = os.path.join('/opt/pinepods/downloads/youtube', f'{youtube_id}.mp3.mp3')

            logging.info(f"Checking paths: {file_path} and {file_path_double}")

            if os.path.exists(file_path):
                logging.info(f"Found file at {file_path}")
                return file_path
            elif os.path.exists(file_path_double):
                logging.info(f"Found file at {file_path_double}")
                return file_path_double
            else:
                logging.info("No file found at either path")

        else:
            logging.info("No YouTube video found in database")

        return None
    except Exception as e:
        logging.error(f"Error retrieving YouTube video location: {e}")
        import traceback
        logging.error(f"Traceback: {traceback.format_exc()}")
        return None
    finally:
        cursor.close()
-
def get_download_location(cnx, database_type, episode_id, user_id):
    """Return the DownloadedLocation for a user's downloaded episode, or None.

    Queries DownloadedEpisodes for the (episode, user) pair and extracts the
    path via get_download_value. Errors are logged and yield None.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT DownloadedLocation FROM "DownloadedEpisodes" WHERE EpisodeID = %s AND UserID = %s'
        else:
            query = "SELECT DownloadedLocation FROM DownloadedEpisodes WHERE EpisodeID = %s AND UserID = %s"

        cursor.execute(query, (episode_id, user_id))
        row = cursor.fetchone()

        if not row:
            print("No DownloadedLocation found for the given EpisodeID and UserID")
            return None

        location = get_download_value(row, "DownloadedLocation")
        print(f"DownloadedLocation found: {location}")
        return location

    except Exception as e:
        logging.error(f"Error retrieving DownloadedLocation: {e}")
        return None

    finally:
        cursor.close()
-
def download_episode_list(database_type, cnx, user_id):
    """List everything the user has downloaded: podcast episodes and YouTube
    videos, merged and sorted newest-first.

    Both UNION arms project identical column aliases so rows are uniform;
    the is_youtube flag distinguishes the source. The query binds six
    parameters: three user_id values per arm (SavedEpisodes/SavedVideos
    join, EpisodeQueue join, and the WHERE filter).

    Returns a list of lowercase-keyed dicts, or None when the user has no
    downloads. On MySQL/MariaDB the 0/1 flag columns are coerced to bools
    to match the PostgreSQL arm's TRUE/FALSE.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:
        cursor = cnx.cursor(dictionary=True)

    if database_type == "postgresql":
        query = """
            SELECT * FROM (
                SELECT
                    "Podcasts".PodcastID as podcastid,
                    "Podcasts".PodcastName as podcastname,
                    "Podcasts".ArtworkURL as artworkurl,
                    "Episodes".EpisodeID as episodeid,
                    "Episodes".EpisodeTitle as episodetitle,
                    "Episodes".EpisodePubDate as episodepubdate,
                    "Episodes".EpisodeDescription as episodedescription,
                    "Episodes".EpisodeArtwork as episodeartwork,
                    "Episodes".EpisodeURL as episodeurl,
                    "Episodes".EpisodeDuration as episodeduration,
                    "Podcasts".PodcastIndexID as podcastindexid,
                    "Podcasts".WebsiteURL as websiteurl,
                    "DownloadedEpisodes".DownloadedLocation as downloadedlocation,
                    "UserEpisodeHistory".ListenDuration as listenduration,
                    "Episodes".Completed as completed,
                    CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
                    TRUE as downloaded,
                    FALSE as is_youtube
                FROM "DownloadedEpisodes"
                INNER JOIN "Episodes" ON "DownloadedEpisodes".EpisodeID = "Episodes".EpisodeID
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "DownloadedEpisodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "DownloadedEpisodes".UserID = "UserEpisodeHistory".UserID
                LEFT JOIN "SavedEpisodes" ON
                    "DownloadedEpisodes".EpisodeID = "SavedEpisodes".EpisodeID
                    AND "SavedEpisodes".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "DownloadedEpisodes".EpisodeID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = FALSE
                WHERE "DownloadedEpisodes".UserID = %s

                UNION ALL

                SELECT
                    "Podcasts".PodcastID as podcastid,
                    "Podcasts".PodcastName as podcastname,
                    "Podcasts".ArtworkURL as artworkurl,
                    "YouTubeVideos".VideoID as episodeid,
                    "YouTubeVideos".VideoTitle as episodetitle,
                    "YouTubeVideos".PublishedAt as episodepubdate,
                    "YouTubeVideos".VideoDescription as episodedescription,
                    "YouTubeVideos".ThumbnailURL as episodeartwork,
                    "YouTubeVideos".VideoURL as episodeurl,
                    "YouTubeVideos".Duration as episodeduration,
                    "Podcasts".PodcastIndexID as podcastindexid,
                    "Podcasts".WebsiteURL as websiteurl,
                    "DownloadedVideos".DownloadedLocation as downloadedlocation,
                    "YouTubeVideos".ListenPosition as listenduration,
                    "YouTubeVideos".Completed as completed,
                    CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued,
                    TRUE as downloaded,
                    TRUE as is_youtube
                FROM "DownloadedVideos"
                INNER JOIN "YouTubeVideos" ON "DownloadedVideos".VideoID = "YouTubeVideos".VideoID
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "SavedVideos" ON
                    "DownloadedVideos".VideoID = "SavedVideos".VideoID
                    AND "SavedVideos".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "DownloadedVideos".VideoID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = TRUE
                WHERE "DownloadedVideos".UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """
    else:  # MySQL or MariaDB
        query = """
            SELECT * FROM (
                SELECT
                    Podcasts.PodcastID as podcastid,
                    Podcasts.PodcastName as podcastname,
                    Podcasts.ArtworkURL as artworkurl,
                    Episodes.EpisodeID as episodeid,
                    Episodes.EpisodeTitle as episodetitle,
                    Episodes.EpisodePubDate as episodepubdate,
                    Episodes.EpisodeDescription as episodedescription,
                    Episodes.EpisodeArtwork as episodeartwork,
                    Episodes.EpisodeURL as episodeurl,
                    Episodes.EpisodeDuration as episodeduration,
                    Podcasts.PodcastIndexID as podcastindexid,
                    Podcasts.WebsiteURL as websiteurl,
                    DownloadedEpisodes.DownloadedLocation as downloadedlocation,
                    UserEpisodeHistory.ListenDuration as listenduration,
                    Episodes.Completed as completed,
                    CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued,
                    1 as downloaded,
                    0 as is_youtube
                FROM DownloadedEpisodes
                INNER JOIN Episodes ON DownloadedEpisodes.EpisodeID = Episodes.EpisodeID
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    DownloadedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND DownloadedEpisodes.UserID = UserEpisodeHistory.UserID
                LEFT JOIN SavedEpisodes ON
                    DownloadedEpisodes.EpisodeID = SavedEpisodes.EpisodeID
                    AND SavedEpisodes.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    DownloadedEpisodes.EpisodeID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 0
                WHERE DownloadedEpisodes.UserID = %s

                UNION ALL

                SELECT
                    Podcasts.PodcastID as podcastid,
                    Podcasts.PodcastName as podcastname,
                    Podcasts.ArtworkURL as artworkurl,
                    YouTubeVideos.VideoID as episodeid,
                    YouTubeVideos.VideoTitle as episodetitle,
                    YouTubeVideos.PublishedAt as episodepubdate,
                    YouTubeVideos.VideoDescription as episodedescription,
                    YouTubeVideos.ThumbnailURL as episodeartwork,
                    YouTubeVideos.VideoURL as episodeurl,
                    YouTubeVideos.Duration as episodeduration,
                    Podcasts.PodcastIndexID as podcastindexid,
                    Podcasts.WebsiteURL as websiteurl,
                    DownloadedVideos.DownloadedLocation as downloadedlocation,
                    YouTubeVideos.ListenPosition as listenduration,
                    YouTubeVideos.Completed as completed,
                    CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued,
                    1 as downloaded,
                    1 as is_youtube
                FROM DownloadedVideos
                INNER JOIN YouTubeVideos ON DownloadedVideos.VideoID = YouTubeVideos.VideoID
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN SavedVideos ON
                    DownloadedVideos.VideoID = SavedVideos.VideoID
                    AND SavedVideos.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    DownloadedVideos.VideoID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 1
                WHERE DownloadedVideos.UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """

    # Now we need 6 parameters: 3 user_ids for each part of the UNION query
    cursor.execute(query, (user_id, user_id, user_id, user_id, user_id, user_id))
    rows = cursor.fetchall()
    cursor.close()

    if not rows:
        return None

    downloaded_episodes = lowercase_keys(rows)

    if database_type != "postgresql":
        # MySQL returns these flags as 0/1 ints; normalize to Python bools
        # so both dialects produce the same payload shape.
        bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
        for episode in downloaded_episodes:
            for field in bool_fields:
                if field in episode:
                    episode[field] = bool(episode[field])
    return downloaded_episodes
-
def save_email_settings(cnx, database_type, email_settings):
    """Update the singleton EmailSettings row (EmailSettingsID = 1).

    auth_required is stored as a boolean on PostgreSQL and as an integer
    on MySQL/MariaDB; all other fields are written as provided.
    """
    cursor = cnx.cursor()

    auth_flag = int(email_settings['auth_required'])
    if database_type == "postgresql":
        # PostgreSQL column is boolean-typed.
        auth_required = bool(auth_flag)
        query = 'UPDATE "EmailSettings" SET Server_Name = %s, Server_Port = %s, From_Email = %s, Send_Mode = %s, Encryption = %s, Auth_Required = %s, Username = %s, Password = %s WHERE EmailSettingsID = 1'
    else:
        auth_required = auth_flag
        query = "UPDATE EmailSettings SET Server_Name = %s, Server_Port = %s, From_Email = %s, Send_Mode = %s, Encryption = %s, Auth_Required = %s, Username = %s, Password = %s WHERE EmailSettingsID = 1"

    params = (
        email_settings['server_name'],
        email_settings['server_port'],
        email_settings['from_email'],
        email_settings['send_mode'],
        email_settings['encryption'],
        auth_required,
        email_settings['email_username'],
        email_settings['email_password'],
    )
    cursor.execute(query, params)
    cnx.commit()
    cursor.close()
-
def get_encryption_key(cnx, database_type):
    """Return the app's encryption key as a base64 string, or None if unset.

    Reads the singleton AppSettings row and normalizes the row to a dict
    keyed by lowercased column names regardless of cursor row shape.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = 'SELECT EncryptionKey FROM "AppSettings" WHERE AppSettingsID = 1'
    else:
        query = "SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1"
    cursor.execute(query)
    row = cursor.fetchone()

    if not row:
        cursor.close()
        return None

    # Normalize to a lowercase-keyed dict whatever the row shape is.
    if isinstance(row, tuple):
        record = {col[0].lower(): val for col, val in zip(cursor.description, row)}
    elif isinstance(row, dict):
        record = {k.lower(): v for k, v in row.items()}
    else:
        record = {}

    cursor.close()

    # The key is stored as binary; callers expect base64 text.
    return base64.b64encode(record['encryptionkey']).decode()
-
def get_email_settings(cnx, database_type):
    """Fetch the singleton EmailSettings row as a dict, or None when empty.

    PostgreSQL rows are normalized via normalize_keys and AuthRequired is
    coerced to 0/1; MySQL/MariaDB tuples are zipped against a fixed key list.
    """
    if database_type == "postgresql":
        cursor = cnx.cursor(row_factory=dict_row)
        query = 'SELECT * FROM "EmailSettings"'
    else:
        cursor = cnx.cursor()
        query = "SELECT * FROM EmailSettings"

    cursor.execute(query)
    row = cursor.fetchone()
    cursor.close()

    if not row:
        return None

    if database_type == "postgresql":
        settings_dict = normalize_keys(row, database_type)
        # Callers expect an integer flag, not a Postgres boolean.
        settings_dict["AuthRequired"] = 1 if settings_dict["AuthRequired"] else 0
    else:
        # Tuple rows carry no names; zip against the table's column order.
        keys = ["Emailsettingsid", "ServerName", "ServerPort", "FromEmail", "SendMode", "Encryption",
                "AuthRequired", "Username", "Password"]
        settings_dict = dict(zip(keys, row))

    return settings_dict
-
-
def get_episode_id(cnx, database_type, podcast_id, episode_title, episode_url):
    """Return the EpisodeID for (podcast, title, url), creating a stub row if missing.

    On PostgreSQL the insert uses RETURNING to obtain the new ID; on
    MySQL/MariaDB cursor.lastrowid is used. Commits before returning.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()

    if database_type == "postgresql":
        query = 'SELECT EpisodeID FROM "Episodes" WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeUrl = %s'
    else:  # MySQL or MariaDB
        query = "SELECT EpisodeID FROM Episodes WHERE PodcastID = %s AND EpisodeTitle = %s AND EpisodeUrl = %s"

    params = (podcast_id, episode_title, episode_url)
    cursor.execute(query, params)
    result = cursor.fetchone()

    if result:
        episode_id = result['episodeid'] if database_type == "postgresql" else result[0]
    else:
        # Episode not found — insert a minimal row and grab its generated ID.
        if database_type == "postgresql":
            query = 'INSERT INTO "Episodes" (PodcastID, EpisodeTitle, EpisodeUrl) VALUES (%s, %s, %s) RETURNING EpisodeID'
        else:  # MySQL or MariaDB
            query = "INSERT INTO Episodes (PodcastID, EpisodeTitle, EpisodeUrl) VALUES (%s, %s, %s)"

        cursor.execute(query, params)
        if database_type == "postgresql":
            # The server folds the unquoted RETURNING column to lowercase, so
            # dict_row keys it as 'episodeid' — looking up 'EpisodeID' raised
            # KeyError here (matches the SELECT branch above).
            episode_id = cursor.fetchone()['episodeid']
        else:
            episode_id = cursor.lastrowid

    cnx.commit()
    cursor.close()

    return episode_id
-
def get_episode_id_ep_name(cnx, database_type, podcast_title, episode_url):
    """Look up an EpisodeID by podcast name and episode URL; None if absent."""
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = '''
            SELECT e.EpisodeID
            FROM "Episodes" e
            JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
            WHERE p.PodcastName = %s AND e.EpisodeURL = %s
        '''
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()
        query = '''
            SELECT e.EpisodeID
            FROM Episodes e
            JOIN Podcasts p ON e.PodcastID = p.PodcastID
            WHERE p.PodcastName = %s AND e.EpisodeURL = %s
        '''

    cursor.execute(query, (podcast_title, episode_url))
    row = cursor.fetchone()

    if not row:
        print(f"No match found for Podcast Name: '{podcast_title}' and Episode URL: '{episode_url}'")
        cursor.close()
        return None

    episode_id = row['episodeid'] if database_type == "postgresql" else row[0]
    cursor.close()
    return episode_id
-
def get_episode_id_by_url(cnx, database_type, episode_url):
    """Return the EpisodeID matching ``episode_url``, or None.

    Tolerates both dict-style and tuple-style cursor rows; any error is
    printed and swallowed, yielding None.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT EpisodeID FROM "Episodes" WHERE EpisodeURL = %s'
        else:
            query = "SELECT EpisodeID FROM Episodes WHERE EpisodeURL = %s"

        cursor.execute(query, (episode_url,))
        row = cursor.fetchone()

        if not row:
            return None  # no matching episode

        if isinstance(row, dict):
            # Row factories differ in key casing; accept either spelling.
            return row.get("episodeid") or row.get("EpisodeID")
        return row[0]
    except Exception as e:
        print(f"Error in get_episode_id_by_url: {e}")
        return None
    finally:
        cursor.close()
-
-
-
def queue_podcast_entry(cnx, database_type, user_id, episode_title, episode_url):
    """Put an episode at the front (position 1) of a user's queue.

    Resolves the episode by title+URL. If it is already queued it is moved
    to position 1 and its QueueDate refreshed; otherwise a new queue row is
    inserted at position 1. Returns True when the episode exists, False
    when no matching episode is found.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()

    # Resolve the episode by title and URL.
    if database_type == "postgresql":
        query = 'SELECT EpisodeID, PodcastID FROM "Episodes" WHERE EpisodeTitle = %s AND EpisodeURL = %s'
    else:
        query = "SELECT EpisodeID, PodcastID FROM Episodes WHERE EpisodeTitle = %s AND EpisodeURL = %s"
    cursor.execute(query, (episode_title, episode_url))
    result = cursor.fetchone()

    if not result:
        # Episode not found in the database.
        cursor.close()
        return False

    # dict_row rows are keyed by lowercased column names. The previous
    # `episode_id, podcast_id = result['EpisodeID']` tried to unpack a
    # single value into two names (and used the wrong key case), so the
    # PostgreSQL path always raised.
    if database_type == "postgresql":
        episode_id, podcast_id = result['episodeid'], result['podcastid']
    else:
        episode_id, podcast_id = result

    # Is the episode already queued?
    if database_type == "postgresql":
        query = 'SELECT COUNT(*) FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s'
    else:
        query = "SELECT COUNT(*) FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s"
    cursor.execute(query, (user_id, episode_id))
    count_row = cursor.fetchone()
    # dict_row rows do not support integer indexing; COUNT(*) is named 'count'.
    count = count_row['count'] if isinstance(count_row, dict) else count_row[0]

    if count > 0:
        # Already queued: bump to the front and refresh the timestamp.
        if database_type == "postgresql":
            query = 'UPDATE "EpisodeQueue" SET QueuePosition = 1, QueueDate = CURRENT_TIMESTAMP WHERE UserID = %s AND EpisodeID = %s'
        else:
            query = "UPDATE EpisodeQueue SET QueuePosition = 1, QueueDate = CURRENT_TIMESTAMP WHERE UserID = %s AND EpisodeID = %s"
    else:
        # Not queued yet: insert at the front.
        if database_type == "postgresql":
            query = 'INSERT INTO "EpisodeQueue" (UserID, EpisodeID, QueuePosition) VALUES (%s, %s, 1)'
        else:
            query = "INSERT INTO EpisodeQueue (UserID, EpisodeID, QueuePosition) VALUES (%s, %s, 1)"
    cursor.execute(query, (user_id, episode_id))
    cnx.commit()

    cursor.close()
    return True
-
-
def episode_remove_queue(cnx, database_type, user_id, url, title):
    """Remove an episode (matched by title + URL) from the user's queue.

    Returns True when a matching episode exists (delete attempted and
    committed), False when the episode is unknown.
    """
    cursor = cnx.cursor()

    # Resolve the episode first.
    if database_type == "postgresql":
        lookup = 'SELECT EpisodeID FROM "Episodes" WHERE EpisodeTitle = %s AND EpisodeURL = %s'
    else:
        lookup = "SELECT EpisodeID FROM Episodes WHERE EpisodeTitle = %s AND EpisodeURL = %s"
    cursor.execute(lookup, (title, url))
    row = cursor.fetchone()

    if not row:
        # Episode not found in the database.
        cursor.close()
        return False

    if database_type == "postgresql":
        delete = 'DELETE FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s'
    else:
        delete = "DELETE FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s"
    # row is a tuple; index 0 holds the EpisodeID.
    cursor.execute(delete, (user_id, row[0]))
    cnx.commit()

    cursor.close()
    return True
-
-
def check_usernames(cnx, database_type, username):
    """Return True when a user row with the given username already exists."""
    cursor = cnx.cursor()
    if database_type == 'postgresql':
        query = 'SELECT COUNT(*) FROM "Users" WHERE Username = %s'
    else:
        query = "SELECT COUNT(*) FROM Users WHERE Username = %s"
    cursor.execute(query, (username,))
    row = cursor.fetchone()
    exists = row[0] > 0
    cursor.close()
    return exists
-
def record_listen_duration(cnx, database_type, episode_id, user_id, listen_duration):
    """Persist a user's listen progress for an episode.

    Inserts a UserEpisodeHistory row on first listen; afterwards updates
    only when the new duration exceeds the stored one, so progress never
    moves backwards. Negative durations are ignored. Errors are logged
    and the transaction rolled back.
    """
    if listen_duration < 0:
        logging.info(f"Skipped updating listen duration for user {user_id} and episode {episode_id} due to invalid duration: {listen_duration}")
        return

    listen_date = datetime.datetime.now()
    cursor = cnx.cursor()
    try:
        # Does a history row already exist for this (user, episode) pair?
        select_sql = (
            'SELECT ListenDuration FROM "UserEpisodeHistory" WHERE UserID=%s AND EpisodeID=%s'
            if database_type == "postgresql"
            else "SELECT ListenDuration FROM UserEpisodeHistory WHERE UserID=%s AND EpisodeID=%s"
        )
        cursor.execute(select_sql, (user_id, episode_id))
        row = cursor.fetchone()

        if row is None:
            # First listen: create the history row.
            insert_sql = (
                'INSERT INTO "UserEpisodeHistory" (UserID, EpisodeID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)'
                if database_type == "postgresql"
                else "INSERT INTO UserEpisodeHistory (UserID, EpisodeID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)"
            )
            cursor.execute(insert_sql, (user_id, episode_id, listen_date, listen_duration))
        else:
            # Row shape depends on the cursor's row factory (tuple vs dict).
            existing_duration = row[0] if isinstance(row, tuple) else row.get("ListenDuration")
            if existing_duration is None:
                existing_duration = 0

            if listen_duration > existing_duration:
                update_sql = (
                    'UPDATE "UserEpisodeHistory" SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND EpisodeID=%s'
                    if database_type == "postgresql"
                    else "UPDATE UserEpisodeHistory SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND EpisodeID=%s"
                )
                cursor.execute(update_sql, (listen_duration, listen_date, user_id, episode_id))
            else:
                print(f"No update required for user {user_id} and episode {episode_id} as existing duration {existing_duration} is greater than or equal to new duration {listen_duration}")

        cnx.commit()
    except Exception as e:
        logging.error(f"Failed to record listen duration due to: {e}")
        cnx.rollback()
    finally:
        cursor.close()
-
-
def record_youtube_listen_duration(cnx, database_type, video_id, user_id, listen_duration):
    """Persist a user's listen progress for a YouTube video.

    Mirrors record_listen_duration but writes to UserVideoHistory, and also
    copies the position into YouTubeVideos.ListenPosition so the video row
    carries the latest resume point. Progress only moves forward: an update
    happens only when the new duration exceeds the stored one. Negative
    durations are ignored. Errors are logged and the transaction rolled back.
    """
    if listen_duration < 0:
        logging.info(f"Skipped updating listen duration for user {user_id} and video {video_id} due to invalid duration: {listen_duration}")
        return

    listen_date = datetime.datetime.now()
    cursor = cnx.cursor()
    try:
        # Look up any existing history row for this (user, video) pair.
        if database_type == "postgresql":
            cursor.execute('SELECT ListenDuration FROM "UserVideoHistory" WHERE UserID=%s AND VideoID=%s', (user_id, video_id))
        else:
            cursor.execute("SELECT ListenDuration FROM UserVideoHistory WHERE UserID=%s AND VideoID=%s", (user_id, video_id))

        result = cursor.fetchone()

        if result is not None:
            # Row shape depends on the cursor's row factory (tuple vs dict).
            existing_duration = result[0] if isinstance(result, tuple) else result.get("ListenDuration")
            existing_duration = existing_duration if existing_duration is not None else 0

            if listen_duration > existing_duration:
                if database_type == "postgresql":
                    update_listen_duration = 'UPDATE "UserVideoHistory" SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND VideoID=%s'
                else:
                    update_listen_duration = "UPDATE UserVideoHistory SET ListenDuration=%s, ListenDate=%s WHERE UserID=%s AND VideoID=%s"
                cursor.execute(update_listen_duration, (listen_duration, listen_date, user_id, video_id))

                # Keep the resume position on the video row in sync.
                if database_type == "postgresql":
                    cursor.execute('UPDATE "YouTubeVideos" SET ListenPosition=%s WHERE VideoID=%s',
                                   (listen_duration, video_id))
                else:
                    cursor.execute("UPDATE YouTubeVideos SET ListenPosition=%s WHERE VideoID=%s",
                                   (listen_duration, video_id))
        else:
            # First listen: create the history row.
            if database_type == "postgresql":
                add_listen_duration = 'INSERT INTO "UserVideoHistory" (UserID, VideoID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)'
            else:
                add_listen_duration = "INSERT INTO UserVideoHistory (UserID, VideoID, ListenDate, ListenDuration) VALUES (%s, %s, %s, %s)"
            cursor.execute(add_listen_duration, (user_id, video_id, listen_date, listen_duration))

            # Initialize the resume position on the video row as well.
            if database_type == "postgresql":
                cursor.execute('UPDATE "YouTubeVideos" SET ListenPosition=%s WHERE VideoID=%s',
                               (listen_duration, video_id))
            else:
                cursor.execute("UPDATE YouTubeVideos SET ListenPosition=%s WHERE VideoID=%s",
                               (listen_duration, video_id))

        cnx.commit()
    except Exception as e:
        logging.error(f"Failed to record YouTube listen duration due to: {e}")
        cnx.rollback()
    finally:
        cursor.close()
-
-
def get_local_episode_times(cnx, database_type, user_id):
    """Return listen-progress records for every episode the user has history on.

    Each entry is a dict with episode_url, podcast_url, listen_duration,
    episode_duration, and completed — the data needed to sync playback
    positions with a client. Returns an empty list when there is no history.
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)

    if database_type == "postgresql":
        cursor.execute("""
            SELECT
                e.EpisodeURL,
                p.FeedURL,
                ueh.ListenDuration,
                e.EpisodeDuration,
                e.Completed
            FROM "UserEpisodeHistory" ueh
            JOIN "Episodes" e ON ueh.EpisodeID = e.EpisodeID
            JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
            WHERE ueh.UserID = %s
        """, (user_id,))
    else:  # MySQL or MariaDB
        # Aliases here match the output keys directly, unlike the PostgreSQL
        # branch where the server lowercases the raw column names.
        cursor.execute("""
            SELECT
                e.EpisodeURL as episode_url,
                p.FeedURL as podcast_url,
                ueh.ListenDuration as listen_duration,
                e.EpisodeDuration as episode_duration,
                e.Completed as completed
            FROM UserEpisodeHistory ueh
            JOIN Episodes e ON ueh.EpisodeID = e.EpisodeID
            JOIN Podcasts p ON e.PodcastID = p.PodcastID
            WHERE ueh.UserID = %s
        """, (user_id,))

    # Normalize every row shape (dict_row, dictionary=True, or plain tuple)
    # into the same output dict.
    episode_times = []
    for row in cursor.fetchall():
        if isinstance(row, dict):
            if database_type == "postgresql":
                # PostgreSQL keys are the lowercased original column names.
                episode_times.append({
                    "episode_url": row["episodeurl"],
                    "podcast_url": row["feedurl"],
                    "listen_duration": row["listenduration"],
                    "episode_duration": row["episodeduration"],
                    "completed": row["completed"]
                })
            else:
                # MySQL's column aliases already match the expected keys.
                episode_times.append({
                    "episode_url": row["episode_url"],
                    "podcast_url": row["podcast_url"],
                    "listen_duration": row["listen_duration"],
                    "episode_duration": row["episode_duration"],
                    "completed": row["completed"]
                })
        else:
            # Tuple rows follow the SELECT column order.
            episode_times.append({
                "episode_url": row[0],
                "podcast_url": row[1],
                "listen_duration": row[2],
                "episode_duration": row[3],
                "completed": row[4]
            })

    cursor.close()
    return episode_times
-
-
-
def generate_guid(episode_time):
    """Derive a stable GUID string for an episode.

    The podcast URL concatenated with the episode URL uniquely identifies
    an episode; uuid3 (MD5-based, namespaced to URLs) maps that string to
    a deterministic UUID.
    """
    import uuid
    seed = "{}{}".format(episode_time["podcast_url"], episode_time["episode_url"])
    return str(uuid.uuid3(uuid.NAMESPACE_URL, seed))
-
def get_playback_speed(cnx, database_type: str, user_id: int, is_youtube: bool, podcast_id: Optional[int] = None) -> float:
    """Return the playback speed for a user or a specific podcast.

    With no podcast_id the user's default speed is read from Users;
    otherwise the per-podcast override is read from Podcasts. Falls back
    to 1.0 when no row or NULL value is found.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            user_sql = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s'
            podcast_sql = 'SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = %s'
        else:
            user_sql = 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s'
            podcast_sql = 'SELECT PlaybackSpeed FROM Podcasts WHERE PodcastID = %s'

        if podcast_id is None:
            cursor.execute(user_sql, (user_id,))
        else:
            cursor.execute(podcast_sql, (podcast_id,))

        row = cursor.fetchone()
        if not row:
            return 1.0

        if isinstance(row, dict):
            # Key casing depends on the row factory; match case-insensitively.
            for name, value in row.items():
                if name.lower() == 'playbackspeed':
                    return value if value is not None else 1.0
            return 1.0

        return row[0] if row[0] is not None else 1.0
    finally:
        cursor.close()
-
-
def check_episode_playback(cnx, database_type, user_id, episode_title, episode_url):
    """Check whether a user has previously played an episode.

    Returns (True, listen_duration) when listen history exists, otherwise
    (False, 0). Also returns (False, 0) when the episode is unknown or the
    connection is in a bad state (InterfaceError).
    """
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()

    try:
        # Resolve the EpisodeID, scoped to the user's own podcasts.
        if database_type == "postgresql":
            query = """
            SELECT e.EpisodeID
            FROM "Episodes" e
            JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
            WHERE e.EpisodeTitle = %s AND e.EpisodeURL = %s AND p.UserID = %s
            """
        else:  # MySQL or MariaDB
            query = """
            SELECT e.EpisodeID
            FROM Episodes e
            JOIN Podcasts p ON e.PodcastID = p.PodcastID
            WHERE e.EpisodeTitle = %s AND e.EpisodeURL = %s AND p.UserID = %s
            """
        cursor.execute(query, (episode_title, episode_url, user_id))
        result = cursor.fetchone()

        if result is None:
            return False, 0

        # dict_row keys are the server's lowercased column names, so the
        # correct key is 'episodeid' — 'EpisodeID' raised KeyError here
        # (the rest of this module consistently uses lowercase keys).
        episode_id = result['episodeid'] if database_type == "postgresql" else result[0]

        # Look up any recorded listen progress.
        if database_type == "postgresql":
            query = 'SELECT ListenDuration FROM "UserEpisodeHistory" WHERE UserID = %s AND EpisodeID = %s'
        else:  # MySQL or MariaDB
            query = "SELECT ListenDuration FROM UserEpisodeHistory WHERE UserID = %s AND EpisodeID = %s"
        cursor.execute(query, (user_id, episode_id))
        result = cursor.fetchone()

        if result:
            listen_duration = result['listenduration'] if database_type == "postgresql" else result[0]
            return True, listen_duration
        return False, 0
    except (psycopg.errors.InterfaceError, mysql.connector.errors.InterfaceError):
        # Connection-level failure: report "never played" rather than crash.
        return False, 0
    finally:
        if cursor:
            cursor.close()
-
-
-
-# def get_episode_listen_time(cnx, user_id, title, url):
-# cursor = None
-# try:
-# cursor = cnx.cursor()
-
-# # Get the EpisodeID from the Episodes table
-# query = "SELECT EpisodeID FROM Episodes WHERE EpisodeTitle = %s AND EpisodeURL = %s"
-# cursor.execute(query, (title, url))
-# episode_id = cursor.fetchone()[0]
-
-# # Get the user's listen duration for this episode
-# query = "SELECT ListenDuration FROM UserEpisodeHistory WHERE UserID = %s AND EpisodeID = %s"
-# cursor.execute(query, (user_id, episode_id))
-# listen_duration = cursor.fetchone()[0]
-
-# return listen_duration
-
-# # Seek to the user's last listen duration
-# # current_episode.seek_to_second(listen_duration)
-
-# finally:
-# if cursor:
-# cursor.close()
-# # cnx.close()
-
-
def get_theme(cnx, database_type, user_id):
    """Return the user's saved UI theme from UserSettings."""
    cursor = None
    try:
        cursor = cnx.cursor()

        if database_type == 'postgresql':
            query = 'SELECT Theme FROM "UserSettings" WHERE UserID = %s'
        else:
            query = "SELECT Theme FROM UserSettings WHERE UserID = %s"
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()

        # Dict rows use the lowercased column name; tuple rows use index 0.
        return row["theme"] if isinstance(row, dict) else row[0]

    finally:
        if cursor:
            cursor.close()
-
-
def set_theme(cnx, database_type, user_id, theme):
    """Persist the user's UI theme choice to UserSettings."""
    cursor = None
    try:
        cursor = cnx.cursor()

        if database_type == 'postgresql':
            query = 'UPDATE "UserSettings" SET Theme = %s WHERE UserID = %s'
        else:
            query = "UPDATE UserSettings SET Theme = %s WHERE UserID = %s"
        cursor.execute(query, (theme, user_id))
        cnx.commit()

    finally:
        if cursor:
            cursor.close()
-
-
def get_user_info(database_type, cnx):
    """Return all users as a list of dicts (lowercase keys), or None.

    IsAdmin is projected as 0/1 on PostgreSQL to match MySQL's integer
    column. Returns None when there are no users or on any error.
    """
    # Initialize up front: if cursor creation fails, the finally clause
    # previously hit an unbound name (NameError masking the real error).
    cursor = None
    try:
        if database_type == "postgresql":
            cnx.row_factory = dict_row
            cursor = cnx.cursor()
            query = 'SELECT UserID, Fullname, Username, Email, CASE WHEN IsAdmin THEN 1 ELSE 0 END AS IsAdmin FROM "Users"'
        else:  # MySQL or MariaDB
            cursor = cnx.cursor(dictionary=True)
            query = "SELECT UserID, Fullname, Username, Email, IsAdmin FROM Users"

        cursor.execute(query)
        rows = cursor.fetchall()

        if not rows:
            return None

        if database_type != "postgresql":
            # Convert column names to lowercase for MySQL
            rows = [{k.lower(): v for k, v in row.items()} for row in rows]

        return rows

    except Exception as e:
        print(f"Error getting user info: {e}")
        return None

    finally:
        if cursor:
            cursor.close()
-
-
def get_my_user_info(database_type, cnx, user_id):
    """Return a single user's info as a dict, or None.

    Keys are lowercased and None values replaced with "" so the payload is
    uniform across dialects and row shapes. IsAdmin is projected as 0/1
    on PostgreSQL to match MySQL's integer column.
    """
    # Initialize up front: if cursor creation fails, the finally clause
    # previously hit an unbound name (NameError masking the real error).
    cursor = None
    try:
        if database_type == "postgresql":
            cnx.row_factory = dict_row
            cursor = cnx.cursor()
            query = '''
                SELECT UserID, Fullname, Username, Email,
                    CASE WHEN IsAdmin THEN 1 ELSE 0 END AS IsAdmin
                FROM "Users"
                WHERE UserID = %s
            '''
        else:  # MySQL or MariaDB
            cursor = cnx.cursor(dictionary=True)
            query = """
                SELECT UserID, Fullname, Username, Email, IsAdmin
                FROM Users
                WHERE UserID = %s
            """
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()

        if not row:
            return None

        # Handle both dict and tuple row shapes.
        if isinstance(row, dict):
            # For MySQL, convert keys to lowercase.
            if database_type != "postgresql":
                return {k.lower(): v if v is not None else "" for k, v in row.items()}
            return {k: v if v is not None else "" for k, v in row.items()}
        else:
            # Tuple rows follow the SELECT column order.
            columns = ['userid', 'fullname', 'username', 'email', 'isadmin']
            return {columns[i]: v if v is not None else "" for i, v in enumerate(row)}

    except Exception as e:
        print(f"Error getting user info: {e}")
        return None
    finally:
        if cursor:
            cursor.close()
-
def get_api_info(database_type, cnx, user_id):
    """Return API-key and RSS-key rows visible to ``user_id``.

    Admin users see every key; non-admins only their own. Each row carries
    the key's last four digits plus an aggregated PodcastIDs column (RSS
    keys only; plain API keys get an empty placeholder). Returns [] when
    nothing matches.
    """
    # First determine whether the user is an admin.
    if database_type == "postgresql":
        cursor = cnx.cursor()
        is_admin_query = 'SELECT IsAdmin FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor()
        is_admin_query = "SELECT IsAdmin FROM Users WHERE UserID = %s"

    cursor.execute(is_admin_query, (user_id,))
    is_admin_result = cursor.fetchone()
    cursor.close()

    # Row may be a tuple or a dict depending on the connection's row factory;
    # an unknown user defaults to non-admin.
    is_admin = is_admin_result[0] if isinstance(is_admin_result, tuple) else is_admin_result["isadmin"] if is_admin_result else 0

    # Build the keys query; dict rows from here on. The empty
    # ARRAY[]/'' placeholder keeps the column list compatible with the
    # RssKeys arm appended below.
    if database_type == "postgresql":
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = (
            'SELECT APIKeyID, "APIKeys".UserID, Username, RIGHT(APIKey, 4) as LastFourDigits, Created, ARRAY[]::integer[] AS PodcastIDs '
            'FROM "APIKeys" '
            'JOIN "Users" ON "APIKeys".UserID = "Users".UserID '
        )
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = (
            "SELECT APIKeyID, APIKeys.UserID, Username, RIGHT(APIKey, 4) as LastFourDigits, Created, '' AS PodcastIDs "
            "FROM APIKeys "
            "JOIN Users ON APIKeys.UserID = Users.UserID "
        )

    # Non-admins only see their own API keys.
    if not is_admin:
        if database_type == 'postgresql':
            query += 'WHERE "APIKeys".UserID = %s'
        else:
            query += "WHERE APIKeys.UserID = %s"

    # TODO: remove after testing
    if database_type == 'postgresql':
        query += '''
            UNION ALL
            SELECT "RssKeys".RssKeyID, "RssKeys".UserID, "Users".Username, RIGHT("RssKeys".RssKey, 4) as LastFourDigits, "RssKeys".Created, ARRAY_AGG("RssKeyMap".PodcastID) as PodcastIDs
            FROM "RssKeys"
            JOIN "Users" ON "RssKeys".UserID = "Users".UserID
            JOIN "RssKeyMap" ON "RssKeys".RssKeyID = "RssKeyMap".RssKeyID
            GROUP BY "RssKeys".RssKeyID, "RssKeys".UserID, "Users".Username, "RssKeys".RssKey, "RssKeys".Created
        '''
    else:
        query += '''
            UNION ALL
            SELECT RssKeys.RssKeyID, RssKeys.UserID, Users.Username, RIGHT(RssKeys.RssKey, 4) as LastFourDigits, RssKeys.Created, GROUP_CONCAT(CAST(RssKeyMap.PodcastID AS CHAR)) as PodcastIDs
            FROM RssKeys
            JOIN Users ON RssKeys.UserID = Users.UserID
            JOIN RssKeyMap ON RssKeys.RssKeyID = RssKeyMap.RssKeyID
            GROUP BY RssKeys.RssKeyID, RssKeys.UserID, Users.Username, RssKeys.RssKey, RssKeys.Created
        '''

    # NOTE(review): for non-admin users this appends a bare WHERE *after*
    # the UNION arm's GROUP BY clause, which does not look like valid SQL —
    # the non-admin path likely errors on execute. Needs restructuring
    # (filter before GROUP BY); confirm against a live database.
    if not is_admin:
        if database_type == 'postgresql':
            query += 'WHERE "RssKeys".UserID = %s'
        else:
            query += 'WHERE RssKeys.UserID = %s'

    # Two bound %s placeholders exist only on the non-admin path.
    cursor.execute(query, (user_id, user_id) if not is_admin else ())
    rows = cursor.fetchall()
    cursor.close()

    if not rows:
        return []

    if database_type != "postgresql":
        # Convert column names to lowercase for MySQL
        rows = [{k.lower(): v for k, v in row.items()} for row in rows]

    return rows
-
def create_api_key(cnx, database_type: str, user_id: int):
    """Generate and persist a random 64-character API key for a user.

    Returns the newly created key string.
    """
    import secrets
    import string

    # Build the key from letters and digits using a CSPRNG.
    charset = string.ascii_letters + string.digits
    new_key = ''.join(secrets.choice(charset) for _ in range(64))

    if database_type == "postgresql":
        insert_sql = 'INSERT INTO "APIKeys" (UserID, APIKey) VALUES (%s, %s)'
    else:  # MySQL or MariaDB
        insert_sql = "INSERT INTO APIKeys (UserID, APIKey) VALUES (%s, %s)"

    cursor = cnx.cursor()
    cursor.execute(insert_sql, (user_id, new_key))
    cnx.commit()
    cursor.close()

    return new_key
-
def create_rss_key(cnx, database_type: str, user_id: int, podcast_ids: list[int] = None):
    """Create a 64-character RSS feed key for a user and map it to podcasts.

    When podcast_ids is provided and does not contain the -1 "all
    podcasts" sentinel, one RssKeyMap row is inserted per podcast ID.
    Returns the new key string; rolls back and re-raises on failure.
    """
    import secrets
    import string

    charset = string.ascii_letters + string.digits
    new_key = ''.join(secrets.choice(charset) for _ in range(64))

    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            # RETURNING hands back the generated key ID in one round trip.
            cursor.execute(
                'INSERT INTO "RssKeys" (UserID, RssKey) VALUES (%s, %s) RETURNING RssKeyID',
                (user_id, new_key),
            )
            returned = cursor.fetchone()
            if not returned:
                raise Exception("Failed to create RSS key - no ID returned")
            # Row may be a dict or a tuple depending on the row factory.
            if isinstance(returned, dict):
                rss_key_id = returned.get('rsskeyid') or returned.get('RssKeyID')
            else:
                rss_key_id = returned[0]
            if not rss_key_id:
                raise Exception("Failed to create RSS key - invalid ID returned")
        else:
            cursor.execute(
                "INSERT INTO RssKeys (UserID, RssKey) VALUES (%s, %s)",
                (user_id, new_key),
            )
            rss_key_id = cursor.lastrowid
            if not rss_key_id:
                raise Exception("Failed to create RSS key - no lastrowid")

        # Only map explicit podcast scopes; -1 means "all podcasts".
        if podcast_ids and len(podcast_ids) > 0 and -1 not in podcast_ids:
            for pid in podcast_ids:
                if database_type == "postgresql":
                    map_sql = 'INSERT INTO "RssKeyMap" (RssKeyID, PodcastID) VALUES (%s, %s)'
                else:
                    map_sql = 'INSERT INTO RssKeyMap (RssKeyID, PodcastID) VALUES (%s, %s)'
                cursor.execute(map_sql, (rss_key_id, pid))

        cnx.commit()
        return new_key
    except Exception as e:
        logging.error(f"Error creating RSS key for user {user_id}: {e}")
        cnx.rollback()
        raise
    finally:
        cursor.close()
-
def set_rss_key_podcasts(cnx, database_type: str, rss_key_id: int, podcast_ids: list[int]):
    """Replace the podcast scope of an RSS key with a new list of podcast IDs."""
    is_postgres = database_type == "postgresql"
    delete_sql = (
        'DELETE FROM "RssKeyMap" WHERE RssKeyID = %s'
        if is_postgres
        else 'DELETE FROM RssKeyMap WHERE RssKeyID = %s'
    )
    insert_sql = (
        'INSERT INTO "RssKeyMap" (RssKeyID, PodcastID) VALUES (%s, %s)'
        if is_postgres
        else 'INSERT INTO RssKeyMap (RssKeyID, PodcastID) VALUES (%s, %s)'
    )

    cursor = cnx.cursor()
    # Drop the old mapping rows, then insert the replacement set.
    cursor.execute(delete_sql, (rss_key_id,))
    for pid in podcast_ids:
        cursor.execute(insert_sql, (rss_key_id, pid))
    cnx.commit()
    cursor.close()
-
-
def get_user_api_key(cnx, database_type, user_id):
    """Fetch the most recently created API key for a user, or None."""
    if database_type == "postgresql":
        sql = """
            SELECT APIKey
            FROM "APIKeys"
            WHERE UserID = %s
            ORDER BY Created DESC
            LIMIT 1
        """
    else:
        sql = """
            SELECT APIKey
            FROM APIKeys
            WHERE UserID = %s
            ORDER BY Created DESC
            LIMIT 1
        """
    cursor = cnx.cursor()
    try:
        cursor.execute(sql, (user_id,))
        row = cursor.fetchone()
        if not row:
            return None
        # Row may be a tuple (default cursor) or a dict (dict row factory).
        return row[0] if isinstance(row, tuple) else row['apikey']
    finally:
        cursor.close()
-
-
def is_same_api_key(cnx, database_type, api_id, api_key):
    """Check whether the stored key for an API key ID matches api_key.

    Returns True only when a row exists and the key matches; False
    otherwise (including when the ID is unknown).
    """
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'SELECT APIKey FROM "APIKeys" WHERE APIKeyID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT APIKey FROM APIKeys WHERE APIKeyID = %s"

    try:
        cursor.execute(query, (api_id,))
        result = cursor.fetchone()
        # BUGFIX: normalize tuple rows to a dict BEFORE closing the
        # cursor — cursor.description is not reliably available on a
        # closed cursor (the original read it after close()).
        if result is not None and isinstance(result, tuple):
            result = dict(zip([desc[0] for desc in cursor.description], result))
    finally:
        cursor.close()

    if result:
        if database_type == 'postgresql':
            if result.get('apikey') == api_key:
                return True
        else:
            if result.get('APIKey') == api_key:
                return True
    return False
-
-
def belongs_to_guest_user(cnx, database_type, api_id):
    """Return True when the given API key ID belongs to the guest user (UserID 1)."""
    if database_type == "postgresql":
        cursor = cnx.cursor()
        query = 'SELECT UserID FROM "APIKeys" WHERE APIKeyID = %s'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT UserID FROM APIKeys WHERE APIKeyID = %s"

    try:
        cursor.execute(query, (api_id,))
        result = cursor.fetchone()
        # BUGFIX: convert tuple rows before close() — the original read
        # cursor.description after closing the cursor, which is not
        # reliably supported by DB drivers.
        if result is not None and isinstance(result, tuple):
            result = dict(zip([desc[0] for desc in cursor.description], result))
    finally:
        cursor.close()

    if result:
        if database_type == 'postgresql':
            return result.get('userid') == 1
        else:
            return result.get('UserID') == 1
    return False
-
-
def delete_api(cnx, database_type, api_id):
    """Remove an API key row by its APIKeyID."""
    sql = (
        'DELETE FROM "APIKeys" WHERE APIKeyID = %s'
        if database_type == "postgresql"
        else "DELETE FROM APIKeys WHERE APIKeyID = %s"
    )
    cursor = cnx.cursor()
    cursor.execute(sql, (api_id,))
    cnx.commit()
    cursor.close()
-
-
-
def set_username(cnx, database_type, user_id, new_username):
    """Update a user's login name."""
    if database_type == "postgresql":
        sql = 'UPDATE "Users" SET Username = %s WHERE UserID = %s'
    else:  # MySQL or MariaDB
        sql = "UPDATE Users SET Username = %s WHERE UserID = %s"
    cursor = cnx.cursor()
    cursor.execute(sql, (new_username, user_id))
    cnx.commit()
    cursor.close()
-
-
-
def set_password(cnx, database_type, user_id, hash_pw):
    """Store a new (already hashed) password for a user."""
    sql = (
        'UPDATE "Users" SET Hashed_PW = %s WHERE UserID = %s'
        if database_type == "postgresql"
        else "UPDATE Users SET Hashed_PW = %s WHERE UserID = %s"
    )
    cursor = cnx.cursor()
    cursor.execute(sql, (hash_pw, user_id))
    cnx.commit()
    cursor.close()
-
-
-
-
def set_email(cnx, database_type, user_id, new_email):
    """Update a user's email address."""
    if database_type == "postgresql":
        sql = 'UPDATE "Users" SET Email = %s WHERE UserID = %s'
    else:  # MySQL or MariaDB
        sql = "UPDATE Users SET Email = %s WHERE UserID = %s"
    cursor = cnx.cursor()
    cursor.execute(sql, (new_email, user_id))
    cnx.commit()
    cursor.close()
-
-
-
def set_fullname(cnx, database_type, user_id, new_name):
    """Update a user's display name."""
    sql = (
        'UPDATE "Users" SET Fullname = %s WHERE UserID = %s'
        if database_type == "postgresql"
        else "UPDATE Users SET Fullname = %s WHERE UserID = %s"
    )
    cursor = cnx.cursor()
    cursor.execute(sql, (new_name, user_id))
    cnx.commit()
    cursor.close()
-
-
-
def set_isadmin(cnx, database_type, user_id, isadmin):
    """Set the admin flag for a user.

    PostgreSQL stores a boolean directly; MySQL/MariaDB stores a 0/1
    integer, so the flag is coerced to int for the latter.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        cursor.execute('UPDATE "Users" SET IsAdmin = %s WHERE UserID = %s', (isadmin, user_id))
    else:  # MySQL or MariaDB
        cursor.execute("UPDATE Users SET IsAdmin = %s WHERE UserID = %s", (int(isadmin), user_id))
    cnx.commit()
    cursor.close()
-
-
-
def delete_user(cnx, database_type, user_id):
    """Delete a user and their rows from all dependent tables.

    Dependent-table deletes are best-effort: a failure on one table is
    logged and the remaining deletes still run (matching the original
    per-table try/except behavior).  The final delete from Users is
    followed by a single commit.
    """
    cursor = cnx.cursor()
    is_postgres = database_type == "postgresql"

    # Child tables are cleared before the Users row itself.  The
    # original code repeated this stanza six times; a loop keeps the
    # identical behavior and error messages without the duplication.
    dependent_tables = [
        "UserEpisodeHistory",
        "DownloadedEpisodes",
        "EpisodeQueue",
        "Podcasts",
        "UserSettings",
        "UserStats",
    ]
    for table in dependent_tables:
        try:
            if is_postgres:
                query = f'DELETE FROM "{table}" WHERE UserID = %s'
            else:  # MySQL or MariaDB
                query = f"DELETE FROM {table} WHERE UserID = %s"
            cursor.execute(query, (user_id,))
        except Exception as e:
            print(f"Error deleting from {table}: {e}")

    # Finally remove the user record and commit everything.
    if is_postgres:
        query = 'DELETE FROM "Users" WHERE UserID = %s'
    else:  # MySQL or MariaDB
        query = "DELETE FROM Users WHERE UserID = %s"
    cursor.execute(query, (user_id,))
    cnx.commit()

    cursor.close()
-
-
-
def user_admin_check(cnx, database_type, user_id):
    """Return True when the user has the IsAdmin flag set."""
    logging.info(f"Checking admin status for user ID: {user_id}, database type: {database_type}")
    query = (
        'SELECT IsAdmin FROM "Users" WHERE UserID = %s'
        if database_type == "postgresql"
        else "SELECT IsAdmin FROM Users WHERE UserID = %s"
    )
    cursor = cnx.cursor()
    cursor.execute(query, (user_id,))
    result = cursor.fetchone()
    cursor.close()

    logging.info(f"Query result: {result}")

    if result is None:
        logging.warning(f"No result found for user ID: {user_id}")
        return False

    try:
        # Row may be a tuple (default cursor) or a dict (dict row factory).
        flag = result[0] if isinstance(result, tuple) else result['isadmin']
        return bool(flag)
    except KeyError as e:
        logging.error(f"KeyError: {e} - Result: {result}")
        return False
-
def final_admin(cnx, database_type, user_id):
    """Return True when user_id is the only remaining admin account.

    Used to prevent demoting or deleting the last administrator.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT COUNT(*) FROM "Users" WHERE IsAdmin = TRUE'
        else:  # MySQL or MariaDB
            query = "SELECT COUNT(*) FROM Users WHERE IsAdmin = 1"
        cursor.execute(query)
        result = cursor.fetchone()
        # Handle both tuple and dict results
        admin_count = result[0] if isinstance(result, tuple) else result['count']

        if admin_count == 1:
            if database_type == "postgresql":
                query = 'SELECT IsAdmin FROM "Users" WHERE UserID = %s'
            else:  # MySQL or MariaDB
                query = "SELECT IsAdmin FROM Users WHERE UserID = %s"
            cursor.execute(query, (user_id,))
            result = cursor.fetchone()
            is_admin = result[0] if isinstance(result, tuple) else result['isadmin']
            # Works for a PostgreSQL boolean or a MySQL/MariaDB int.
            if is_admin:
                return True
        return False
    finally:
        # BUGFIX: the original leaked the cursor when returning True
        # early; try/finally guarantees it is always closed.
        cursor.close()
-
def download_status(cnx, database_type):
    """Return True when episode downloads are enabled in AppSettings."""
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        cursor = cnx.cursor(row_factory=dict_row)
        query = 'SELECT DownloadEnabled FROM "AppSettings"'
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = "SELECT DownloadEnabled FROM AppSettings"

    cursor.execute(query)
    row = cursor.fetchone()
    cursor.close()

    if not row:
        return False

    # Row shape depends on the driver: dict (either key case) or tuple.
    if isinstance(row, dict):
        enabled = row.get('DownloadEnabled') or row.get('downloadenabled')
    else:
        enabled = row[0]
    return enabled == 1
-
-
-
-
def guest_status(cnx, database_type):
    """Return True when the guest account is active.

    Activation is tracked by a marker row whose Email column is 'active'.
    """
    query = (
        'SELECT Email FROM "Users" WHERE Email = \'active\''
        if database_type == "postgresql"
        else "SELECT Email FROM Users WHERE Email = 'active'"
    )
    cursor = cnx.cursor()
    cursor.execute(query)
    row = cursor.fetchone()
    cursor.close()
    return bool(row)
-
-
def enable_disable_guest(cnx, database_type):
    """Toggle the guest account between active and inactive.

    State lives in the guest user's Email column ('active'/'inactive').
    """
    sql = (
        'UPDATE "Users" SET Email = CASE WHEN Email = \'inactive\' THEN \'active\' ELSE \'inactive\' END WHERE Username = \'guest\''
        if database_type == "postgresql"
        else "UPDATE Users SET Email = CASE WHEN Email = 'inactive' THEN 'active' ELSE 'inactive' END WHERE Username = 'guest'"
    )
    cursor = cnx.cursor()
    cursor.execute(sql)
    cnx.commit()
    cursor.close()
-
-
-
def enable_disable_downloads(cnx, database_type):
    """Toggle the global DownloadEnabled flag in AppSettings."""
    if database_type == "postgresql":
        sql = 'UPDATE "AppSettings" SET DownloadEnabled = CASE WHEN DownloadEnabled = true THEN false ELSE true END'
    else:  # MySQL or MariaDB
        sql = "UPDATE AppSettings SET DownloadEnabled = CASE WHEN DownloadEnabled = 1 THEN 0 ELSE 1 END"
    cursor = cnx.cursor()
    cursor.execute(sql)
    cnx.commit()
    cursor.close()
-
-
-
-
def check_admin_exists(cnx, database_type):
    """Count admin accounts, excluding the internal background_tasks user."""
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT COUNT(*) as count FROM "Users"
                WHERE IsAdmin = TRUE
                AND Username != 'background_tasks'
            """
        else:  # MySQL or MariaDB
            query = """
                SELECT COUNT(*) FROM Users
                WHERE IsAdmin = 1
                AND Username != 'background_tasks'
            """
        cursor.execute(query)
        row = cursor.fetchone()
        if not row:
            return 0
        # Dict rows expose the aliased column; tuple rows use position 0.
        return row['count'] if isinstance(row, dict) else row[0]
    finally:
        cursor.close()
-
def self_service_status(cnx, database_type):
    """Report whether self-service signup is enabled and whether an admin exists.

    Returns a dict with:
      status: True when the SelfServiceUser flag is set in AppSettings.
      first_admin_created: True when at least one real admin account exists.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT SelfServiceUser FROM "AppSettings" WHERE SelfServiceUser = TRUE'
        else:  # MySQL or MariaDB
            query = "SELECT SelfServiceUser FROM AppSettings WHERE SelfServiceUser = 1"
        cursor.execute(query)
        self_service_row = cursor.fetchone()

        # Delegate the admin count to the shared helper.
        admin_count = check_admin_exists(cnx, database_type)

        return {
            "status": bool(self_service_row),
            "first_admin_created": admin_count > 0,
        }
    finally:
        cursor.close()
-
def enable_disable_self_service(cnx, database_type):
    """Toggle the SelfServiceUser flag (self-service account creation)."""
    sql = (
        'UPDATE "AppSettings" SET SelfServiceUser = CASE WHEN SelfServiceUser = true THEN false ELSE true END'
        if database_type == "postgresql"
        else "UPDATE AppSettings SET SelfServiceUser = CASE WHEN SelfServiceUser = 1 THEN 0 ELSE 1 END"
    )
    cursor = cnx.cursor()
    cursor.execute(sql)
    cnx.commit()
    cursor.close()
-
-
-
def verify_api_key(cnx, database_type, passed_key):
    """Return True when passed_key exists in the APIKeys table."""
    query = (
        'SELECT * FROM "APIKeys" WHERE APIKey = %s'
        if database_type == "postgresql"
        else "SELECT * FROM APIKeys WHERE APIKey = %s"
    )
    cursor = cnx.cursor()
    try:
        cursor.execute(query, (passed_key,))
        return bool(cursor.fetchone())
    except Exception as e:
        logging.error(f'verify_api_key error: {str(e)}')
        return False
    finally:
        cursor.close()
-
def get_user_gpodder_status(cnx, database_type, user_id):
    """Fetch a user's gpodder sync configuration.

    Returns a dict with sync_type (falls back to the string "None"),
    gpodder_url and gpodder_login, or None when the user does not exist
    or a database error occurs.
    """
    cursor = cnx.cursor()
    try:
        print(f"Getting status for user_id: {user_id}")

        query = (
            'SELECT Pod_Sync_Type, GpodderUrl, GpodderLoginName FROM "Users" WHERE UserID = %s'
            if database_type == "postgresql"
            else 'SELECT Pod_Sync_Type, GpodderUrl, GpodderLoginName FROM Users WHERE UserID = %s'
        )
        cursor.execute(query, (user_id,))
        user_data = cursor.fetchone()
        print(f"Raw user_data: {user_data}, type: {type(user_data)}")

        if not user_data:
            print("No user data found")
            return None

        if isinstance(user_data, dict):
            print("Handling dict type return")
            # Drivers may return either key casing.
            sync_type = user_data.get('Pod_Sync_Type') or user_data.get('pod_sync_type')
            print(f"Dict sync_type before default: {sync_type}")
            sync_type = sync_type if sync_type else "None"
            gpodder_url = user_data.get('GpodderUrl') or user_data.get('gpodderurl')
            gpodder_login = user_data.get('GpodderLoginName') or user_data.get('gpodderloginname')
        else:
            print("Handling tuple/list type return")
            sync_type = user_data[0]
            print(f"Tuple sync_type before default: {sync_type}")
            sync_type = sync_type if sync_type else "None"
            gpodder_url = user_data[1] if len(user_data) > 1 else None
            gpodder_login = user_data[2] if len(user_data) > 2 else None

        print(f"Final sync_type: {sync_type}")

        status = {
            "sync_type": sync_type,
            "gpodder_url": gpodder_url,
            "gpodder_login": gpodder_login
        }
        print(f"Returning user status: {status}")
        return status
    except Exception as e:
        print(f"Database error in get_user_gpodder_status: {str(e)}")
        return None
    finally:
        cursor.close()
-
def update_user_gpodder_sync(cnx, database_type, user_id, new_sync_type):
    """Set a user's Pod_Sync_Type and verify the write.

    Returns True when at least one row was updated, False on failure.

    BUGFIX: the previous implementation ignored database_type and always
    used PostgreSQL quoted identifiers ('"Users"'), which fails on
    MySQL/MariaDB; both queries now branch like the rest of this module.
    """
    cursor = cnx.cursor()
    try:
        print(f"Updating sync type for user_id {user_id} to {new_sync_type}")
        if database_type == "postgresql":
            update_query = 'UPDATE "Users" SET Pod_Sync_Type = %s WHERE UserID = %s'
            verify_query = 'SELECT Pod_Sync_Type FROM "Users" WHERE UserID = %s'
        else:  # MySQL or MariaDB
            update_query = "UPDATE Users SET Pod_Sync_Type = %s WHERE UserID = %s"
            verify_query = "SELECT Pod_Sync_Type FROM Users WHERE UserID = %s"

        cursor.execute(update_query, (new_sync_type, user_id))
        rows_affected = cursor.rowcount
        print(f"Rows affected by update: {rows_affected}")
        cnx.commit()
        print("Transaction committed")

        # Read the value back to confirm the update took effect.
        verify_cursor = cnx.cursor()
        verify_cursor.execute(verify_query, (user_id,))
        updated_value = verify_cursor.fetchone()
        verify_cursor.close()
        print(f"Verification after update: {updated_value}")

        return rows_affected > 0
    except Exception as e:
        print(f"Database error in update_user_gpodder_sync: {e}")
        return False
    finally:
        cursor.close()
-
-
def get_rss_feed_status(cnx, database_type: str, user_id: int) -> bool:
    """Return True when RSS feeds are enabled for the given user."""
    cursor = cnx.cursor()
    logging.info(f"Checking RSS feed status for user {user_id}")
    try:
        query = (
            'SELECT enablerssfeeds FROM "Users" WHERE userid = %s'
            if database_type == "postgresql"
            else "SELECT EnableRSSFeeds FROM Users WHERE UserID = %s"
        )
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
        logging.info(f"RSS feed status raw result: {result}")

        # Module-level helper normalizes tuple/dict row shapes.
        value = get_value_from_result(result, 'enablerssfeeds', False)
        logging.info(f"RSS feed status processed value: {value}")

        return bool(value)
    except Exception as e:
        logging.error(f"Error checking RSS feed status: {e}")
        return False
    finally:
        cursor.close()
-
-
def toggle_rss_feeds(cnx, database_type: str, user_id: int) -> bool:
    """Flip the EnableRSSFeeds flag for a user and return the new state.

    When the flag ends up enabled and the user has no RSS key yet, one
    is created scoped to all podcasts (the -1 sentinel).  Re-raises on
    any database or key-creation failure after logging it.
    """
    cursor = cnx.cursor()
    try:
        # Get current status
        if database_type == "postgresql":
            cursor.execute('SELECT EnableRSSFeeds FROM "Users" WHERE UserID = %s', (user_id,))
        else:
            cursor.execute("SELECT EnableRSSFeeds FROM Users WHERE UserID = %s", (user_id,))

        current_status = cursor.fetchone()

        # Handle different return types from psycopg
        if current_status is None:
            # User not found, default to enabling RSS feeds
            new_status = True
        elif isinstance(current_status, dict):
            # Dictionary format (with dict_row)
            current_value = current_status.get('enablerssfeeds') or current_status.get('EnableRSSFeeds')
            new_status = not bool(current_value) if current_value is not None else True
        elif isinstance(current_status, (tuple, list)):
            # Tuple format (default psycopg behavior)
            current_value = current_status[0] if current_status else None
            new_status = not bool(current_value) if current_value is not None else True
        else:
            # Fallback - assume enabling
            new_status = True

        # Update status
        if database_type == "postgresql":
            cursor.execute(
                'UPDATE "Users" SET EnableRSSFeeds = %s WHERE UserID = %s',
                (new_status, user_id)
            )
        else:
            # NOTE(review): "Set" keyword capitalization is unusual but
            # SQL keywords are case-insensitive, so this is harmless.
            cursor.execute(
                "UPDATE Users Set EnableRSSFeeds = %s WHERE UserID = %s",
                (new_status, user_id)
            )
        cnx.commit()

        # If enabling RSS feeds, create an RSS key if one doesn't exist
        if new_status:
            # Check if user already has an RSS key
            if database_type == "postgresql":
                cursor.execute('SELECT RssKeyID FROM "RssKeys" WHERE UserID = %s', (user_id,))
            else:
                cursor.execute("SELECT RssKeyID FROM RssKeys WHERE UserID = %s", (user_id,))

            existing_key = cursor.fetchone()
            # Check if RSS key exists - handle both tuple and dict returns
            has_existing_key = False
            if existing_key:
                if isinstance(existing_key, dict):
                    has_existing_key = bool(existing_key.get('rsskeyid') or existing_key.get('RssKeyID'))
                elif isinstance(existing_key, (tuple, list)):
                    has_existing_key = bool(existing_key[0])

            if not has_existing_key:
                try:
                    # Create RSS key for all podcasts (-1 means all)
                    create_rss_key(cnx, database_type, user_id, [-1])
                    logging.info(f"Created RSS key for user {user_id}")
                except Exception as rss_error:
                    logging.error(f"Failed to create RSS key for user {user_id}: {rss_error}")
                    raise Exception(f"Failed to create RSS key: {str(rss_error)}")

        return new_status
    except Exception as e:
        logging.error(f"Error in toggle_rss_feeds for user {user_id}: {e}")
        raise
    finally:
        cursor.close()
-
-
def get_user_rss_key(cnx, database_type: str, user_id: int) -> str:
    """Return the user's RSS key, or None when they have none."""
    query = (
        'SELECT RssKey FROM "RssKeys" WHERE UserID = %s'
        if database_type == "postgresql"
        else "SELECT RssKey FROM RssKeys WHERE UserID = %s"
    )
    cursor = cnx.cursor()
    try:
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()
        if not row:
            return None
        # Row may be a tuple (default cursor) or a dict (dict row factory).
        return row[0] if isinstance(row, tuple) else row['rsskey']
    finally:
        cursor.close()
-
-
def parse_date_safely(date_str):
    """Parse a timestamp into a timezone-aware (UTC) datetime.

    Accepts datetime objects (made aware if naive), PostgreSQL-style
    'YYYY-MM-DD HH:MM:SS[.ffffff]' strings, and ISO-8601 strings (a
    trailing 'Z' is allowed).  Falls back to the current UTC time when
    the input cannot be parsed.
    """
    if isinstance(date_str, dt):
        return date_str if date_str.tzinfo else date_str.replace(tzinfo=timezone.utc)

    # Try the explicit PostgreSQL formats first, then ISO-8601.  A flat
    # loop replaces the original three-deep try/except pyramid.
    for fmt in ('%Y-%m-%d %H:%M:%S', '%Y-%m-%d %H:%M:%S.%f'):
        try:
            return dt.strptime(date_str, fmt).replace(tzinfo=timezone.utc)
        except (ValueError, TypeError):
            pass

    try:
        parsed = dt.fromisoformat(date_str.replace('Z', '+00:00'))
        return parsed if parsed.tzinfo else parsed.replace(tzinfo=timezone.utc)
    except (ValueError, TypeError, AttributeError):
        # AttributeError guards non-string inputs (e.g. ints) that reach
        # .replace(); default to "now" rather than raising.
        return dt.now(timezone.utc)
-
-
def get_value_from_rss_result(result, key_name: str, default=None):
    """Safely extract a single value from a DB row (dict or tuple).

    For dict rows, tries the key in lowercase then uppercase.  For
    tuple/list rows, returns the first element.  Returns `default` when
    the row is None, the key is absent, or the value is None.
    """
    if result is None:
        return default

    # Handle dictionary result
    if isinstance(result, dict):
        # BUGFIX: check key presence explicitly instead of `or`-chaining
        # so legitimate falsy values (0, '', False) are returned rather
        # than silently replaced by the default.
        for candidate in (key_name.lower(), key_name.upper()):
            if candidate in result and result[candidate] is not None:
                return result[candidate]
        return default

    # Handle tuple result
    if isinstance(result, (tuple, list)) and len(result) > 0:
        return result[0] if result[0] is not None else default

    return default
-
-# Define the custom feed class at module level
class PodcastFeed(feedgenerator.Rss201rev2Feed):
    """RSS 2.0 feed with iTunes podcast extensions.

    Adds the itunes XML namespace and, when `podcast_image` /
    `podcast_name` attributes are set on the instance by the caller,
    emits channel- and item-level artwork elements.
    """

    def root_attributes(self):
        # Declare the itunes namespace on the <rss> root element.
        attrs = super().root_attributes()
        attrs['xmlns:itunes'] = 'http://www.itunes.com/dtds/podcast-1.0.dtd'
        return attrs

    def add_root_elements(self, handler):
        """Emit channel-level artwork: itunes:image plus a standard <image> block."""
        super().add_root_elements(handler)
        # Access podcast_image and podcast_name through instance variables
        if hasattr(self, 'podcast_image') and self.podcast_image:
            handler.addQuickElement('itunes:image',
                attrs={'href': self.podcast_image})
            handler.startElement('image', {})
            handler.addQuickElement('url', self.podcast_image)
            handler.addQuickElement('title', self.podcast_name)
            handler.addQuickElement('link', 'https://github.com/madeofpendletonwool/pinepods')
            handler.endElement('image')

    def add_item_elements(self, handler, item):
        """Emit per-episode itunes:image when the item carries artwork_url."""
        super().add_item_elements(handler, item)
        if 'artwork_url' in item:
            handler.addQuickElement('itunes:image',
                attrs={'href': item['artwork_url']})
-
-
-def generate_podcast_rss(database_type: str, cnx, rss_key: dict, limit: int, source_type: str, domain: str, podcast_id: Optional[List[int]] = None) -> str:
- from datetime import datetime as dt, timezone
- cursor = cnx.cursor()
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)
- user_id = rss_key.get('user_id')
- podcast_ids = rss_key.get('podcast_ids')
- key = rss_key.get('key')
-
- # If podcast_id parameter is provided, use it; otherwise use RSS key podcast_ids
- print(f'DEBUG: podcast_id param: {podcast_id}, type: {type(podcast_id)}')
- print(f'DEBUG: rss_key podcast_ids: {podcast_ids}, type: {type(podcast_ids)}')
-
- explicit_podcast_filter = False
- if podcast_id and len(podcast_id) > 0:
- podcast_ids = podcast_id
- explicit_podcast_filter = True
- print(f'DEBUG: Using explicit podcast filter, podcast_ids set to: {podcast_ids}')
-
- podcast_filter = explicit_podcast_filter or (podcast_ids and len(podcast_ids) > 0 and -1 not in podcast_ids)
- print(f'DEBUG: podcast_filter: {podcast_filter}, explicit_podcast_filter: {explicit_podcast_filter}')
- try:
- # Check if RSS feeds are enabled for user
- if not get_rss_feed_status(cnx, database_type, user_id):
- raise HTTPException(status_code=403, detail="RSS feeds not enabled for this user")
-
- # Get user info for feed metadata
- if database_type == "postgresql":
- cursor.execute('SELECT username FROM "Users" WHERE userid = %s', (user_id,))
- else:
- cursor.execute("SELECT Username FROM Users WHERE UserID = %s", (user_id,))
-
- user = cursor.fetchone()
- if not user:
- raise HTTPException(status_code=404, detail="User not found")
-
- username = get_value_from_rss_result(user, 'username', 'Unknown User')
-
- if not source_type or source_type != "youtube":
- # Build the query with correct case for each database type
- if database_type == "postgresql":
- base_query = '''
- SELECT
- e.episodeid,
- e.podcastid,
- e.episodetitle,
- e.episodedescription,
- CASE WHEN de.episodeid IS NULL
- THEN e.episodeurl
- ELSE CONCAT(CAST(%s AS TEXT), '/api/data/stream/', e.episodeid, '?api_key=', CAST(%s AS TEXT), '&user_id=', pp.userid)
- END as episodeurl,
- e.episodeartwork,
- e.episodepubdate,
- e.episodeduration,
- pp.podcastname,
- pp.author,
- pp.artworkurl,
- pp.description as podcastdescription
- FROM "Episodes" e
- JOIN "Podcasts" pp ON e.podcastid = pp.podcastid
- LEFT JOIN "DownloadedEpisodes" de ON e.episodeid = de.episodeid
- WHERE pp.userid = %s
- '''
- else:
- base_query = '''
- SELECT
- e.EpisodeID,
- e.PodcastID,
- e.EpisodeTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle,
- e.EpisodeDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription,
- CASE WHEN de.EpisodeID IS NULL
- THEN e.EpisodeURL COLLATE utf8mb4_unicode_ci
- ELSE CONCAT(CAST(%s AS CHAR), '/api/data/stream/', CAST(e.EpisodeID AS CHAR), '?api_key=', CAST(%s AS CHAR), '&user_id=', pp.UserID)
- END COLLATE utf8mb4_unicode_ci as EpisodeURL,
- e.EpisodeArtwork COLLATE utf8mb4_unicode_ci as EpisodeArtwork,
- e.EpisodePubDate,
- e.EpisodeDuration,
- pp.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName,
- pp.Author COLLATE utf8mb4_unicode_ci as Author,
- pp.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL,
- pp.Description COLLATE utf8mb4_unicode_ci as PodcastDescription
- FROM Episodes e
- JOIN Podcasts pp ON e.PodcastID = pp.PodcastID
- LEFT JOIN DownloadedEpisodes de ON e.EpisodeID = de.EpisodeID
- WHERE pp.UserID = %s
- '''
-
- params = [domain, key, user_id]
- if podcast_filter:
- if database_type == "postgresql":
- base_query += ' AND pp.podcastid = ANY(%s)'
- params.append(podcast_ids)
- else:
- placeholders = ','.join(['%s'] * len(podcast_ids))
- base_query += f' AND pp.PodcastID IN ({placeholders})'
- params.extend(podcast_ids)
-
- # For MySQL, only add YouTube union if we actually need it to avoid collation issues
- add_youtube_union = not source_type or source_type == "youtube"
- if database_type != "postgresql":
- # For MySQL, check if any of the filtered podcasts are actually YouTube channels
- if podcast_filter and add_youtube_union:
- cursor_temp = cnx.cursor()
- placeholders = ','.join(['%s'] * len(podcast_ids))
- cursor_temp.execute(f"SELECT COUNT(*) FROM Podcasts WHERE PodcastID IN ({placeholders}) AND IsYouTubeChannel = 1", podcast_ids)
- youtube_count = cursor_temp.fetchone()[0]
- cursor_temp.close()
- add_youtube_union = youtube_count > 0
-
- if add_youtube_union:
- if base_query:
- base_query += "\nUNION ALL\n"
-
- if database_type == "postgresql":
- base_query += '''
- SELECT
- y.videoid as episodeid,
- y.podcastid,
- y.videotitle as episodetitle,
- y.videodescription as episodetitle,
- CONCAT(CAST(%s AS TEXT), '/api/data/stream/', CAST(y.videoid AS TEXT), '?api_key=', CAST(%s AS TEXT), '&type=youtube&user_id=', pv.userid) as episodeurl,
- y.thumbnailurl as episodeartwork,
- y.publishedat as episodepubdate,
- y.duration as episodeduration,
- pv.podcastname,
- pv.author,
- pv.artworkurl,
- pv.description as podcastdescription
- FROM "YouTubeVideos" y
- JOIN "Podcasts" pv on y.podcastid = pv.podcastid
- WHERE pv.userid = %s
- '''
- else:
- base_query += '''
- SELECT
- y.VideoID as EpisodeID,
- y.PodcastID as PodcastID,
- y.VideoTitle COLLATE utf8mb4_unicode_ci as EpisodeTitle,
- y.VideoDescription COLLATE utf8mb4_unicode_ci as EpisodeDescription,
- CONCAT(CAST(%s AS CHAR), '/api/data/stream/', CAST(y.VideoID AS CHAR), '?api_key=', CAST(%s AS CHAR), '&type=youtube&user_id=', pv.UserID) COLLATE utf8mb4_unicode_ci as EpisodeURL,
- y.ThumbnailURL COLLATE utf8mb4_unicode_ci as EpisodeArtwork,
- y.PublishedAt as EpisodePubDate,
- y.Duration as EpisodeDuration,
- pv.PodcastName COLLATE utf8mb4_unicode_ci as PodcastName,
- pv.Author COLLATE utf8mb4_unicode_ci as Author,
- pv.ArtworkURL COLLATE utf8mb4_unicode_ci as ArtworkURL,
- pv.Description COLLATE utf8mb4_unicode_ci as PodcastDescription
- FROM YouTubeVideos y
- JOIN Podcasts pv on y.PodcastID = pv.PodcastID
- WHERE pv.UserID = %s
- '''
- params += [domain, key, user_id]
-
- if podcast_filter:
- if database_type == "postgresql":
- base_query += ' AND y.podcastid = ANY(%s)'
- params.append(podcast_ids)
- else:
- placeholders = ','.join(['%s'] * len(podcast_ids))
- base_query += f' AND y.PodcastID IN ({placeholders})'
- params.extend(podcast_ids)
-
- base_query += f' ORDER BY 7 DESC'
- # Only apply limit if no specific podcast is requested
- if not explicit_podcast_filter:
- base_query += ' LIMIT %s'
- params.append(limit)
- cursor.execute(base_query, params)
- print('q1')
- # Get column names and create result mapping
- columns = [desc[0].lower() for desc in cursor.description]
- column_map = {name: idx for idx, name in enumerate(columns)}
- # Inside generate_podcast_rss, replace the dictionary creation section with:
-
- episodes = []
- all_rows = cursor.fetchall()
-
- for row_idx, row in enumerate(all_rows):
- try:
- episode_dict = {}
-
- # If row is already a dictionary, use it directly
- if isinstance(row, dict):
- source_dict = row
- else:
- # Convert tuple to dictionary using column names
- source_dict = dict(zip(columns, row))
-
- # Process each column
- for col in columns:
- try:
-
- # Get value either from dictionary or by index
- if isinstance(row, dict):
- raw_value = row.get(col)
- else:
- col_idx = column_map[col]
- raw_value = row[col_idx] if col_idx < len(row) else None
-
- # Special handling for dates
- if col == 'episodepubdate' and raw_value is not None:
- try:
- if isinstance(raw_value, dt):
- value = raw_value if raw_value.tzinfo else raw_value.replace(tzinfo=timezone.utc)
- else:
- value = dt.strptime(str(raw_value), '%Y-%m-%d %H:%M:%S')
- value = value.replace(tzinfo=timezone.utc)
- except Exception as e:
- logger.error(f"Date parsing failed: {str(e)}")
- value = dt.now(timezone.utc)
- else:
- value = raw_value if raw_value is not None else ''
-
- episode_dict[col] = value
-
- except Exception as e:
- logger.error(f"Error processing column {col}: {str(e)}", exc_info=True)
- # Use safe defaults
- if col == 'episodepubdate':
- episode_dict[col] = dt.now(timezone.utc)
- else:
- episode_dict[col] = ''
-
- episodes.append(episode_dict)
-
- except Exception as e:
- logger.error(f"Error processing row {row_idx}: {str(e)}", exc_info=True)
- continue
-
- logger.info(f"Successfully processed {len(episodes)} episodes")
-
- # Get podcast name if podcast_id is provided
- podcast_name = "All Podcasts"
- feed_image = "/var/www/html/static/assets/favicon.png" # Default to Pinepods logo
-
- # Get podcast details when filtering by specific podcast(s)
- if podcast_filter:
- try:
- if database_type == "postgresql":
- cursor.execute(
- 'SELECT podcastname, artworkurl, description FROM "Podcasts" WHERE podcastid = ANY(%s)',
- (podcast_ids,)
- )
- else:
- # For single podcast ID, use direct equals instead of IN
- if len(podcast_ids) == 1:
- cursor.execute(
- "SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID = %s",
- (podcast_ids[0],)
- )
- else:
- placeholders = ','.join(['%s'] * len(podcast_ids))
- cursor.execute(
- f"SELECT PodcastName, ArtworkURL, Description FROM Podcasts WHERE PodcastID IN ({placeholders})",
- tuple(podcast_ids)
- )
- result = cursor.fetchone()
- if result:
- if isinstance(result, tuple):
- podcast_name = result[0] or "Unknown Podcast"
- feed_image = result[1] or feed_image
- podcast_description = result[2] or "No description available"
- else:
- podcast_name = result.get('podcastname') or result.get('PodcastName') or "Unknown Podcast"
- feed_image = result.get('artworkurl') or result.get('ArtworkURL') or feed_image
- podcast_description = result.get('description') or result.get('Description') or "No description available"
- else:
- podcast_name = "Unknown Podcast"
- podcast_description = "No description available"
- except Exception as e:
- logger.error(f"Error fetching podcast details: {str(e)}")
- podcast_name = "Unknown Podcast"
- podcast_description = "No description available"
-
- # Set appropriate description based on whether we're filtering by specific podcast
- if podcast_filter and 'podcast_description' in locals():
- feed_description = podcast_description
- else:
- feed_description = f"RSS feed for {'all' if not podcast_filter else 'selected'} podcasts from Pinepods"
-
- # Initialize feed with custom class
- feed = PodcastFeed(
- title=f"Pinepods - {podcast_name}",
- link="https://github.com/madeofpendletonwool/pinepods",
- description=feed_description,
- language="en",
- author_name=username,
- feed_url="",
- ttl="60"
- )
-
- # Set feed image - use podcast artwork for specific podcast, Pinepods logo for all podcasts
- feed.podcast_image = feed_image
- feed.podcast_name = podcast_name
-
- # Set podcast image if available
- if episodes:
- feed.podcast_image = episodes[0].get('artworkurl')
- feed.podcast_name = podcast_name
-
- # Debug logging for image URLs
- logger.info(f"Podcast artwork URL: {episodes[0].get('artworkurl') if episodes else 'None'}")
-
- # Add items to feed
- for episode in episodes:
- try:
- episode_image = episode.get('episodeartwork') or episode.get('artworkurl', '')
- # Ensure URLs don't have double-encoded ampersands
- episode_url = str(episode.get('episodeurl', '')).replace('&', '&')
-
- feed.add_item(
- title=str(episode.get('episodetitle', 'Untitled Episode')),
- link=episode_url,
- description=str(episode.get('episodedescription', '')),
- unique_id=str(episode.get('episodeid', '')),
- enclosure=feedgenerator.Enclosure(
- url=episode_url,
- length=str(episode.get('episodeduration', '0')),
- mime_type='audio/mpeg'
- ),
- pubdate=episode.get('episodepubdate', dt.now(timezone.utc)),
- author=str(episode.get('author', '')),
- artwork_url=episode_image
- )
- except Exception as e:
- logger.error(f"Error adding episode to feed: {str(e)}")
- continue
-
- # Generate RSS and fix URL encoding
- rss_content = feed.writeString('utf-8')
- # Fix XML-escaped ampersands in URLs to ensure they work properly
- rss_content = rss_content.replace('&user_id=', '&user_id=')
- rss_content = rss_content.replace('&api_key=', '&api_key=')
- rss_content = rss_content.replace('&type=', '&type=')
- # Fix HTML entities that should be actual HTML tags in descriptions
- rss_content = rss_content.replace('<', '<')
- rss_content = rss_content.replace('>', '>')
- rss_content = rss_content.replace('"', '"')
- rss_content = rss_content.replace(''', "'")
- # Note: Keep & as-is since it's a valid XML entity
- return rss_content
-
- except Exception as e:
- logger.error(f"Error generating RSS feed: {str(e)}", exc_info=True)
- raise HTTPException(status_code=500, detail=f"Error generating RSS feed: {str(e)}")
- finally:
- cursor.close()
-
-
def set_rss_feed_status(cnx, database_type: str, user_id: int, enable: bool) -> bool:
    """Enable or disable RSS feed generation for a user.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_id: ID of the user whose flag is updated.
        enable: Desired state of the EnableRSSFeeds flag.

    Returns:
        The value of ``enable`` that was written.
    """
    cursor = cnx.cursor()
    try:
        # PostgreSQL table names are created quoted in this schema; MySQL is not.
        table = '"Users"' if database_type == "postgresql" else "Users"
        cursor.execute(
            f"UPDATE {table} SET EnableRSSFeeds = %s WHERE UserID = %s",
            (enable, user_id),
        )
        cnx.commit()
        return enable
    finally:
        cursor.close()
-
-
def get_api_key(cnx, database_type, username):
    """Return the API key for ``username``, creating one if none exists.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        username: Login name to look up.

    Returns:
        The API key string, None when the username is unknown, or an
        error-description string on database failure (legacy sentinel
        preserved for existing callers).
    """
    cursor = cnx.cursor()
    try:
        # Resolve the username to a UserID first.
        if database_type == "postgresql":
            query = 'SELECT UserID FROM "Users" WHERE username = %s'
        else:  # MySQL or MariaDB
            query = "SELECT UserID FROM Users WHERE username = %s"
        cursor.execute(query, (username,))
        result = cursor.fetchone()

        if result is None:
            print("No user found with the provided username.")
            return None
        # Rows may be tuples or dicts depending on the cursor's row factory.
        user_id = result[0] if isinstance(result, tuple) else result["userid"]

        # Fetch at most one key for this user.
        if database_type == "postgresql":
            query = 'SELECT APIKey FROM "APIKeys" WHERE UserID = %s LIMIT 1'
        else:  # MySQL or MariaDB
            query = "SELECT APIKey FROM APIKeys WHERE UserID = %s LIMIT 1"
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()

        if result:
            api_key = result[0] if isinstance(result, tuple) else result["apikey"]
            print(f"Result: {api_key}")
            return api_key
        else:
            print("No API key found for the provided user. Creating a new one...")
            return create_api_key(cnx, database_type, user_id)

    except Exception as e:
        print(f"An error occurred: {str(e)}")
        return f"An error occurred: {str(e)}"
    finally:
        # Fix: previously the cursor leaked whenever an exception fired
        # before the mid-function close() calls were reached.
        cursor.close()
-
-
def get_api_user(cnx, database_type, api_key):
    """Resolve an API key to its owning UserID.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        api_key: Key string to look up.

    Returns:
        The user id on success, the string "ApiKey Not Found" for an
        unknown key, or an error-description string on database failure
        (legacy sentinels preserved for existing callers).
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT UserID FROM "APIKeys" WHERE APIKey = %s LIMIT 1'
        else:  # MySQL or MariaDB
            query = "SELECT UserID FROM APIKeys WHERE APIKey = %s LIMIT 1"

        cursor.execute(query, (api_key,))
        result = cursor.fetchone()

        if result:
            # Rows may be tuples or dicts depending on the cursor's row factory.
            user_id = result[0] if isinstance(result, tuple) else result['userid']
            print(f"Result: {user_id}")
            return user_id
        else:
            print(f"ApiKey Not Found")
            return "ApiKey Not Found"

    except Exception as e:
        print(f"An error occurred: {str(e)}")
        return f"An error occurred: {str(e)}"
    finally:
        # Fix: previously the cursor leaked when an exception fired
        # before the success-path close().
        cursor.close()
-
def get_value_from_result(result, key_name: str, default=None):
    """Safely extract a single value from a DB row of unknown shape.

    Supports dict rows (psycopg dict_row / MySQL dictionary cursors) and
    tuple/list rows.  For dict rows the key is tried exactly as given,
    then lowercased, then uppercased.  For tuple rows the first element
    is returned (callers only use this for single-column selects).

    Fixes over the previous version:
      * ``or``-chaining made legitimate falsy values (0, "", False)
        fall through to ``default`` — now only None triggers the default;
      * an empty tuple no longer raises IndexError;
      * the exact-case key is now tried before case variants.

    Args:
        result: A row from ``cursor.fetchone()`` (dict, tuple, list, or None).
        key_name: Column name to look up in dict rows.
        default: Value returned when the row or the value is missing.
    """
    if result is None:
        return default

    if isinstance(result, dict):
        for key in (key_name, key_name.lower(), key_name.upper()):
            if key in result and result[key] is not None:
                return result[key]
        return default

    if isinstance(result, (tuple, list)):
        if len(result) > 0 and result[0] is not None:
            return result[0]
        return default

    return default
-
-
def id_from_api_key(cnx, database_type: str, passed_key: str, rss_feed: bool = False):
    """Resolve an API key to its owning UserID.

    ``rss_feed`` is accepted for signature compatibility but is not used
    by the lookup.  Returns the user id, or None when the key is unknown
    or a database error occurs (errors are logged, never raised).
    """
    cursor = cnx.cursor()
    try:
        sql = (
            'SELECT userid FROM "APIKeys" WHERE apikey = %s'
            if database_type == "postgresql"
            else "SELECT UserID FROM APIKeys WHERE APIKey = %s"
        )
        cursor.execute(sql, (passed_key,))
        row = cursor.fetchone()
        if row is None:
            logging.error("No result found for API key")
            return None

        try:
            return get_value_from_result(row, 'userid')
        except Exception as e:
            logging.error(f"Error extracting user_id from result: {e}")
            # If we failed to get from dict, try tuple
            if isinstance(row, tuple) and len(row) > 0:
                return row[0]
            raise

    except Exception as e:
        logging.error(f"Error in id_from_api_key: {e}")
        return None
    finally:
        cursor.close()
-
def get_rss_key_if_valid(cnx, database_type: str, passed_key: str, podcast_ids: Optional[List[int]] = None):
    """Validate an RSS feed key and resolve which podcasts it may access.

    Looks up ``passed_key`` in RssKeys and aggregates the podcast ids mapped
    to that key via RssKeyMap into one comma-separated string.  When the
    caller requests specific podcasts (``podcast_ids`` non-empty and not
    containing the -1 sentinel), the key's own podcast list is intersected
    with the request; a key whose mapping is empty or contains -1 is treated
    as unrestricted and the requested ids pass through unchanged.

    Returns:
        dict with 'user_id', 'podcast_ids' (list[int]) and 'key' on success,
        or None when the key is unknown or a database error occurs.
    """
    # Only filter when the caller asked for specific podcasts (no -1 sentinel).
    filter_podcast_ids = (podcast_ids and len(podcast_ids) > 0 and -1 not in podcast_ids)
    cursor = cnx.cursor()
    try:
        params = [passed_key]
        if database_type == "postgresql":
            # STRING_AGG folds every mapped podcast id into one CSV column.
            query = '''
                SELECT fk.userid, STRING_AGG(CAST(fkm.podcastid AS TEXT), ',') as podcastids
                FROM "RssKeys" fk
                LEFT JOIN "RssKeyMap" fkm ON fk.rsskeyid = fkm.rsskeyid
                WHERE fk.rsskey = %s
                GROUP BY fk.userid
            '''
        else:
            # GROUP_CONCAT is the MySQL/MariaDB equivalent of STRING_AGG.
            query = '''
                SELECT fk.UserID, GROUP_CONCAT(fkm.PodcastID) as podcastids
                FROM RssKeys fk
                LEFT JOIN RssKeyMap fkm ON fk.RssKeyID = fkm.RssKeyID
                WHERE fk.RssKey = %s
                GROUP BY fk.UserID
            '''

        cursor.execute(query, tuple(params))
        result = cursor.fetchone()

        if result is None:
            logging.error("No result found for Feed Key")
            return None

        try:
            user_id = get_value_from_result(result, 'userid')
            key_podcast_ids = get_value_from_result(result, 'podcastids')
            logging.info(f"Successfully extracted user_id: {user_id} and podcast_ids: {key_podcast_ids}")

            # Convert podcast_ids string to list of integers
            podcast_ids_list = []
            if key_podcast_ids:
                podcast_ids_list = [int(pid) for pid in key_podcast_ids.split(',')]

            if filter_podcast_ids:
                # Unrestricted key (empty mapping or -1): honor the request as-is;
                # otherwise intersect the key's list with the requested ids.
                if not podcast_ids_list or len(podcast_ids_list) == 0 or -1 in podcast_ids_list:
                    podcast_ids_list = podcast_ids
                else:
                    podcast_ids_list = [pid for pid in podcast_ids_list if pid in podcast_ids]

            return {
                'user_id': user_id,
                'podcast_ids': podcast_ids_list,
                'key': passed_key
            }
        except Exception as e:
            logging.error(f"Error extracting data from result: {e}")
            # If we failed to get from dict, try tuple
            # (duplicate of the happy path above, driven by raw tuple indexing).
            if isinstance(result, tuple) and len(result) > 0:
                user_id = result[0]
                key_podcast_ids = result[1] if len(result) > 1 else None
                podcast_ids_list = []
                if key_podcast_ids:
                    podcast_ids_list = [int(pid) for pid in key_podcast_ids.split(',')]
                if filter_podcast_ids:
                    if not podcast_ids_list or len(podcast_ids_list) == 0 or -1 in podcast_ids_list:
                        podcast_ids_list = podcast_ids
                    else:
                        podcast_ids_list = [pid for pid in podcast_ids_list if pid in podcast_ids]
                return {
                    'user_id': user_id,
                    'podcast_ids': podcast_ids_list,
                    'key': passed_key
                }
            # Re-raise so the outer handler logs and returns None.
            raise

    except Exception as e:
        logging.error(f"Error in podcasts_from_rss_key: {e}")
        return None
    finally:
        cursor.close()
-
def validate_episode_access(cnx, database_type: str, episode_id: int, podcast_ids: Optional[List[int]] = None):
    """Check whether an episode (or YouTube video) belongs to an allowed podcast.

    Fixes over the previous version:
      * the MySQL branch used PostgreSQL-style double-quoted identifiers,
        which MySQL/MariaDB reject unless ANSI_QUOTES is enabled;
      * ``IN (%s)`` was handed a whole Python list as one parameter, which
        neither driver expands into a value list;
      * the INNER JOINs required a podcast to have BOTH a matching Episodes
        row and a YouTubeVideos row, so mixed feeds could never validate —
        LEFT JOINs with an IS NOT NULL check accept either kind;
      * the mutable default argument ``[]`` is replaced with None.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        episode_id: EpisodeID or VideoID to check.
        podcast_ids: Podcast ids the caller is allowed to access.

    Returns:
        True when the episode/video belongs to one of ``podcast_ids``;
        False otherwise (including on empty allow-list or database error).
    """
    if not podcast_ids:
        # No allowed podcasts means nothing can match.
        return False
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = '''
                SELECT COUNT(*)
                FROM "Podcasts" p
                LEFT JOIN "Episodes" e ON p.PodcastID = e.PodcastID AND e.EpisodeID = %s
                LEFT JOIN "YouTubeVideos" y ON p.PodcastID = y.PodcastID AND y.VideoID = %s
                WHERE (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
                AND p.PodcastID = ANY(%s)
            '''
            # psycopg adapts a Python list for = ANY(%s) natively.
            cursor.execute(query, (episode_id, episode_id, list(podcast_ids)))
        else:
            # Expand one %s per id for the IN clause.
            placeholders = ','.join(['%s'] * len(podcast_ids))
            query = f'''
                SELECT COUNT(*)
                FROM Podcasts p
                LEFT JOIN Episodes e ON p.PodcastID = e.PodcastID AND e.EpisodeID = %s
                LEFT JOIN YouTubeVideos y ON p.PodcastID = y.PodcastID AND y.VideoID = %s
                WHERE (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
                AND p.PodcastID IN ({placeholders})
            '''
            cursor.execute(query, tuple([episode_id, episode_id] + list(podcast_ids)))
        result = cursor.fetchone()
        # Row may be a tuple or a dict depending on the connection's row factory.
        count = next(iter(result.values())) if isinstance(result, dict) else result[0]
        return count > 0
    except Exception as e:
        logging.error(f"Error in validate_episode_access: {e}")
        return False
    finally:
        cursor.close()
-
# def check_api_permission(cnx, passed_key):
# NOTE(review): despite the name above, the commented-out body below is a
# MySQL database-restore routine, not an API-permission check — confirm the
# intended function before reviving any of this dead code.
-# import tempfile
-# # Create a temporary file to store the content. This is because the mysql command reads from a file.
-# with tempfile.NamedTemporaryFile(mode='w+', delete=True) as tempf:
-# tempf.write(server_restore_data)
-# tempf.flush()
-# cmd = [
-# "mysql",
-# "-h", 'db',
-# "-P", '3306',
-# "-u", "root",
-# "-p" + database_pass,
-# "pypods_database"
-# ]
-
-# # Use the file's content as input for the mysql command
-# with open(tempf.name, 'r') as file:
-# process = subprocess.Popen(cmd, stdin=file, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-# stdout, stderr = process.communicate()
-
-# if process.returncode != 0:
-# raise Exception(f"Restoration failed with error: {stderr.decode()}")
-
-# return "Restoration completed successfully!"
-
-
def get_stats(cnx, database_type, user_id):
    """Fetch a user's listening statistics plus gpodder sync settings.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_id: User whose stats are fetched.

    Returns:
        dict with UserCreated, PodcastsPlayed, TimeListened, PodcastsAdded,
        EpisodesSaved, EpisodesDownloaded, GpodderUrl and Pod_Sync_Type,
        or None when the user has no UserStats row.
    """
    logging.info(f"Fetching stats for user ID: {user_id}, database type: {database_type}")
    cursor = cnx.cursor()
    try:
        # First get the user stats.
        if database_type == "postgresql":
            query = 'SELECT UserCreated, PodcastsPlayed, TimeListened, PodcastsAdded, EpisodesSaved, EpisodesDownloaded FROM "UserStats" WHERE UserID = %s'
        else:  # MySQL or MariaDB
            query = "SELECT UserCreated, PodcastsPlayed, TimeListened, PodcastsAdded, EpisodesSaved, EpisodesDownloaded FROM UserStats WHERE UserID = %s"
        cursor.execute(query, (user_id,))
        stats_results = cursor.fetchall()
        logging.info(f"Stats query results: {stats_results}")

        if not stats_results:
            logging.warning(f"No stats found for user ID: {user_id}")
            return None
        stats_result = stats_results[0]

        # Now get ONLY GpodderUrl and Pod_Sync_Type from the Users table.
        if database_type == "postgresql":
            gpodder_query = 'SELECT GpodderUrl, Pod_Sync_Type FROM "Users" WHERE UserID = %s'
        else:  # MySQL or MariaDB
            gpodder_query = "SELECT GpodderUrl, Pod_Sync_Type FROM Users WHERE UserID = %s"
        cursor.execute(gpodder_query, (user_id,))
        gpodder_results = cursor.fetchone()
    finally:
        # Fix: previously the cursor leaked whenever a query raised
        # before the mid-function close().
        cursor.close()

    logging.info(f"GPodder query results: {gpodder_results}")

    # Rows may arrive as dicts (dict_row / dictionary cursor) or tuples;
    # normalize either shape into the response dict.
    out_keys = ("UserCreated", "PodcastsPlayed", "TimeListened",
                "PodcastsAdded", "EpisodesSaved", "EpisodesDownloaded")
    if isinstance(stats_result, dict):
        # PostgreSQL folds unquoted column names to lowercase; MySQL keeps case.
        row_keys = (tuple(k.lower() for k in out_keys)
                    if database_type == 'postgresql' else out_keys)
        stats = {out: stats_result[src] for out, src in zip(out_keys, row_keys)}
    else:  # Assume it's a tuple
        stats = dict(zip(out_keys, stats_result))

    # Add ONLY GpodderUrl and Pod_Sync_Type to the stats.
    if gpodder_results is None:
        # Fix: previously a missing Users row crashed with a TypeError on
        # gpodder_results[0]; report the sync fields as absent instead.
        stats.update({"GpodderUrl": None, "Pod_Sync_Type": None})
    elif isinstance(gpodder_results, dict):
        if database_type == 'postgresql':
            stats.update({
                "GpodderUrl": gpodder_results['gpodderurl'],
                "Pod_Sync_Type": gpodder_results['pod_sync_type']
            })
        else:
            stats.update({
                "GpodderUrl": gpodder_results['GpodderUrl'],
                "Pod_Sync_Type": gpodder_results['Pod_Sync_Type']
            })
    else:  # Assume it's a tuple
        stats.update({
            "GpodderUrl": gpodder_results[0],
            "Pod_Sync_Type": gpodder_results[1]
        })

    logging.info(f"Fetched stats with GPodder info: {stats}")
    return stats
-
-
def saved_episode_list(database_type, cnx, user_id):
    """Return every episode and YouTube video the user has saved, newest first.

    Unions regular saved episodes with saved YouTube videos; each row carries
    listen progress (listenduration) plus saved/queued/downloaded/completed
    flags and an is_youtube discriminator.

    Args:
        database_type: "postgresql" or a MySQL/MariaDB variant.
        cnx: Open database connection.
        user_id: Owner of the saved items.

    Returns:
        A list of lowercase-keyed dicts, or None when nothing is saved.
    """
    if database_type == "postgresql":
        # NOTE(review): this sets dict_row on the CONNECTION, so every later
        # cursor on this connection also returns dicts — side effect to be
        # aware of when reading other helpers in this module.
        cnx.row_factory = dict_row
        cursor = cnx.cursor()
        query = """
            SELECT * FROM (
                SELECT
                    "Podcasts".PodcastName as podcastname,
                    "Episodes".EpisodeTitle as episodetitle,
                    "Episodes".EpisodePubDate as episodepubdate,
                    "Episodes".EpisodeDescription as episodedescription,
                    "Episodes".EpisodeID as episodeid,
                    "Episodes".EpisodeArtwork as episodeartwork,
                    "Episodes".EpisodeURL as episodeurl,
                    "Episodes".EpisodeDuration as episodeduration,
                    "Podcasts".WebsiteURL as websiteurl,
                    "UserEpisodeHistory".ListenDuration as listenduration,
                    "Episodes".Completed as completed,
                    TRUE as saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
                    CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    FALSE as is_youtube
                FROM "SavedEpisodes"
                INNER JOIN "Episodes" ON "SavedEpisodes".EpisodeID = "Episodes".EpisodeID
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "SavedEpisodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "UserEpisodeHistory".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "SavedEpisodes".EpisodeID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = FALSE
                LEFT JOIN "DownloadedEpisodes" ON
                    "SavedEpisodes".EpisodeID = "DownloadedEpisodes".EpisodeID
                    AND "DownloadedEpisodes".UserID = %s
                WHERE "SavedEpisodes".UserID = %s

                UNION ALL

                SELECT
                    "Podcasts".PodcastName as podcastname,
                    "YouTubeVideos".VideoTitle as episodetitle,
                    "YouTubeVideos".PublishedAt as episodepubdate,
                    "YouTubeVideos".VideoDescription as episodedescription,
                    "YouTubeVideos".VideoID as episodeid,
                    "YouTubeVideos".ThumbnailURL as episodeartwork,
                    "YouTubeVideos".VideoURL as episodeurl,
                    "YouTubeVideos".Duration as episodeduration,
                    "Podcasts".WebsiteURL as websiteurl,
                    "UserVideoHistory".ListenDuration as listenduration,
                    "YouTubeVideos".Completed as completed,
                    TRUE as saved,
                    CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL AND "EpisodeQueue".is_youtube = TRUE THEN TRUE ELSE FALSE END AS queued,
                    CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
                    TRUE as is_youtube
                FROM "SavedVideos"
                INNER JOIN "YouTubeVideos" ON "SavedVideos".VideoID = "YouTubeVideos".VideoID
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserVideoHistory" ON
                    "SavedVideos".VideoID = "UserVideoHistory".VideoID
                    AND "UserVideoHistory".UserID = %s
                LEFT JOIN "EpisodeQueue" ON
                    "SavedVideos".VideoID = "EpisodeQueue".EpisodeID
                    AND "EpisodeQueue".UserID = %s
                    AND "EpisodeQueue".is_youtube = TRUE
                LEFT JOIN "DownloadedVideos" ON
                    "SavedVideos".VideoID = "DownloadedVideos".VideoID
                    AND "DownloadedVideos".UserID = %s
                WHERE "SavedVideos".UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """
    else:  # MySQL or MariaDB
        cursor = cnx.cursor(dictionary=True)
        query = """
            SELECT * FROM (
                SELECT
                    Podcasts.PodcastName as podcastname,
                    Episodes.EpisodeTitle as episodetitle,
                    Episodes.EpisodePubDate as episodepubdate,
                    Episodes.EpisodeDescription as episodedescription,
                    Episodes.EpisodeID as episodeid,
                    Episodes.EpisodeArtwork as episodeartwork,
                    Episodes.EpisodeURL as episodeurl,
                    Episodes.EpisodeDuration as episodeduration,
                    Podcasts.WebsiteURL as websiteurl,
                    UserEpisodeHistory.ListenDuration as listenduration,
                    Episodes.Completed as completed,
                    1 as saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS queued,
                    CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
                    0 as is_youtube
                FROM SavedEpisodes
                INNER JOIN Episodes ON SavedEpisodes.EpisodeID = Episodes.EpisodeID
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    SavedEpisodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND UserEpisodeHistory.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    SavedEpisodes.EpisodeID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 0
                LEFT JOIN DownloadedEpisodes ON
                    SavedEpisodes.EpisodeID = DownloadedEpisodes.EpisodeID
                    AND DownloadedEpisodes.UserID = %s
                WHERE SavedEpisodes.UserID = %s

                UNION ALL

                SELECT
                    Podcasts.PodcastName as podcastname,
                    YouTubeVideos.VideoTitle as episodetitle,
                    YouTubeVideos.PublishedAt as episodepubdate,
                    YouTubeVideos.VideoDescription as episodedescription,
                    YouTubeVideos.VideoID as episodeid,
                    YouTubeVideos.ThumbnailURL as episodeartwork,
                    YouTubeVideos.VideoURL as episodeurl,
                    YouTubeVideos.Duration as episodeduration,
                    Podcasts.WebsiteURL as websiteurl,
                    UserVideoHistory.ListenDuration as listenduration,
                    YouTubeVideos.Completed as completed,
                    1 as saved,
                    CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL AND EpisodeQueue.is_youtube = 1 THEN 1 ELSE 0 END AS queued,
                    CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
                    1 as is_youtube
                FROM SavedVideos
                INNER JOIN YouTubeVideos ON SavedVideos.VideoID = YouTubeVideos.VideoID
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserVideoHistory ON
                    SavedVideos.VideoID = UserVideoHistory.VideoID
                    AND UserVideoHistory.UserID = %s
                LEFT JOIN EpisodeQueue ON
                    SavedVideos.VideoID = EpisodeQueue.EpisodeID
                    AND EpisodeQueue.UserID = %s
                    AND EpisodeQueue.is_youtube = 1
                LEFT JOIN DownloadedVideos ON
                    SavedVideos.VideoID = DownloadedVideos.VideoID
                    AND DownloadedVideos.UserID = %s
                WHERE SavedVideos.UserID = %s
            ) combined
            ORDER BY episodepubdate DESC
        """

    # Execute with all params for both unions - we now need 8 user_id parameters
    cursor.execute(query, (user_id, user_id, user_id, user_id, user_id, user_id, user_id, user_id))
    rows = cursor.fetchall()
    cursor.close()

    # NOTE(review): returns None (not []) when nothing is saved — callers
    # apparently distinguish the two; verify before changing.
    if not rows:
        return None

    saved_episodes = lowercase_keys(rows)

    # MySQL returns 0/1 ints for the flag columns; coerce to real booleans so
    # both backends produce the same payload shape.
    if database_type != "postgresql":
        bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
        for episode in saved_episodes:
            for field in bool_fields:
                if field in episode:
                    episode[field] = bool(episode[field])

    return saved_episodes
-
def save_episode(cnx, database_type, episode_id, user_id, is_youtube=False):
    """Mark an episode (or YouTube video) as saved for a user.

    Inserts a row into SavedEpisodes/SavedVideos and increments the user's
    EpisodesSaved counter in UserStats, committing both together.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        episode_id: EpisodeID (or VideoID when ``is_youtube``).
        user_id: Owner of the save.
        is_youtube: True to save a YouTube video instead of an episode.

    Returns:
        True on success, False on any failure (e.g. duplicate save).
    """
    cursor = cnx.cursor()
    try:
        if is_youtube:
            if database_type == "postgresql":
                query = 'INSERT INTO "SavedVideos" (UserID, VideoID) VALUES (%s, %s)'
            else:
                query = "INSERT INTO SavedVideos (UserID, VideoID) VALUES (%s, %s)"
        else:
            if database_type == "postgresql":
                query = 'INSERT INTO "SavedEpisodes" (UserID, EpisodeID) VALUES (%s, %s)'
            else:
                query = "INSERT INTO SavedEpisodes (UserID, EpisodeID) VALUES (%s, %s)"

        cursor.execute(query, (user_id, episode_id))

        # Keep the user's saved-episode counter in sync.
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET EpisodesSaved = EpisodesSaved + 1 WHERE UserID = %s'
        else:
            query = "UPDATE UserStats SET EpisodesSaved = EpisodesSaved + 1 WHERE UserID = %s"
        cursor.execute(query, (user_id,))

        cnx.commit()
        return True
    except Exception as e:
        # Fix: roll back so a failed INSERT (e.g. duplicate save) does not
        # leave the transaction in an aborted state for subsequent queries
        # on this shared connection.
        cnx.rollback()
        print(f"Error saving {'video' if is_youtube else 'episode'}: {e}")
        return False
    finally:
        cursor.close()
-
def check_saved(cnx, database_type, user_id, episode_id, is_youtube=False):
    """Return True if the user has saved the given episode (or YouTube video).

    Any database error is logged to stdout and reported as False.
    """
    # Pick the query by (is_youtube, is_postgresql) instead of nested ifs.
    queries = {
        (True, True): 'SELECT * FROM "SavedVideos" WHERE UserID = %s AND VideoID = %s',
        (True, False): "SELECT * FROM SavedVideos WHERE UserID = %s AND VideoID = %s",
        (False, True): 'SELECT * FROM "SavedEpisodes" WHERE UserID = %s AND EpisodeID = %s',
        (False, False): "SELECT * FROM SavedEpisodes WHERE UserID = %s AND EpisodeID = %s",
    }
    cursor = cnx.cursor()
    try:
        sql = queries[(bool(is_youtube), database_type == "postgresql")]
        cursor.execute(sql, (user_id, episode_id))
        return bool(cursor.fetchone())
    except Exception as err:
        print(f"Error checking saved {'video' if is_youtube else 'episode'}: {err}")
        return False
    finally:
        cursor.close()
-
def remove_saved_episode(cnx, database_type, episode_id, user_id, is_youtube=False):
    """Remove a saved episode (or YouTube video) for a user.

    Three-step sequence: look up the SaveID for (episode, user), delete that
    saved row, then decrement the user's EpisodesSaved counter — committed as
    one unit, rolled back together on any failure.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        episode_id: EpisodeID (or VideoID when ``is_youtube``).
        user_id: Owner of the save.
        is_youtube: True to remove a saved YouTube video instead.

    Returns:
        None.  Missing saves are logged and ignored; errors are logged and
        rolled back, never raised.
    """
    cursor = cnx.cursor()
    try:
        logging.info(f"Removing {'video' if is_youtube else 'episode'} {episode_id} for user {user_id}")
        # Step 1: find the SaveID for this (episode/video, user) pair.
        if is_youtube:
            if database_type == "postgresql":
                query = """
                    SELECT SaveID FROM "SavedVideos"
                    WHERE VideoID = %s AND UserID = %s
                """
            else:
                query = """
                    SELECT SaveID FROM SavedVideos
                    WHERE VideoID = %s AND UserID = %s
                """
        else:
            if database_type == "postgresql":
                query = """
                    SELECT SaveID FROM "SavedEpisodes"
                    WHERE EpisodeID = %s AND UserID = %s
                """
            else:
                query = """
                    SELECT SaveID FROM SavedEpisodes
                    WHERE EpisodeID = %s AND UserID = %s
                """
        cursor.execute(query, (episode_id, user_id))
        result = cursor.fetchone()
        if not result:
            logging.warning(f"No saved {'video' if is_youtube else 'episode'} found for ID {episode_id} and user {user_id}")
            return

        # Handle both dictionary and tuple result types
        save_id = result['saveid'] if isinstance(result, dict) else result[0]
        logging.info(f"Found SaveID: {save_id}")

        # Step 2: remove the saved entry by its primary key.
        if is_youtube:
            if database_type == "postgresql":
                query = 'DELETE FROM "SavedVideos" WHERE SaveID = %s'
            else:
                query = "DELETE FROM SavedVideos WHERE SaveID = %s"
        else:
            if database_type == "postgresql":
                query = 'DELETE FROM "SavedEpisodes" WHERE SaveID = %s'
            else:
                query = "DELETE FROM SavedEpisodes WHERE SaveID = %s"

        cursor.execute(query, (save_id,))
        rows_affected = cursor.rowcount
        logging.info(f"Deleted {rows_affected} rows")

        # Step 3: keep the user's saved-episode counter in sync.
        if database_type == "postgresql":
            query = 'UPDATE "UserStats" SET EpisodesSaved = EpisodesSaved - 1 WHERE UserID = %s'
        else:
            query = "UPDATE UserStats SET EpisodesSaved = EpisodesSaved - 1 WHERE UserID = %s"

        cursor.execute(query, (user_id,))
        stats_rows_affected = cursor.rowcount
        logging.info(f"Updated {stats_rows_affected} user stats rows")

        # Commit delete + counter decrement atomically.
        cnx.commit()
    except Exception as e:
        logging.error(f"Error during {'video' if is_youtube else 'episode'} removal: {e}")
        # Undo any partial delete/decrement on failure.
        cnx.rollback()
    finally:
        cursor.close()
-
def get_categories(cnx, database_type, podcast_id, user_id):
    """Return the category list for a user's podcast.

    Categories are stored as a single ", "-separated string on the
    Podcasts row; this splits it back into a list.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Podcast to read.
        user_id: Owner of the podcast.

    Returns:
        list[str] of categories; [] when the podcast is missing, has no
        categories, or the row has an unexpected shape.  Database errors
        are logged and re-raised.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = (
                'SELECT "categories" '
                'FROM "Podcasts" '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:  # For MySQL or MariaDB
            query = (
                "SELECT Categories "
                "FROM Podcasts "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(query, (podcast_id, user_id))
        result = cursor.fetchone()

        if not result:
            logging.warning("No matching podcast found.")
            # Fix: removed the redundant cursor.close() here — the finally
            # block already closes it, and double-closing was sloppy.
            return []

        # Rows may be dicts or tuples depending on the cursor's row factory.
        if isinstance(result, dict):
            categories_field = result.get('categories')
        elif isinstance(result, tuple):
            categories_field = result[0]
        else:
            logging.error(f"Unexpected result type: {type(result)}")
            return []

        # Split the ", "-separated string back into a list.
        return categories_field.split(', ') if categories_field else []

    except Exception as e:
        logging.error(f"Error retrieving categories: {e}")
        raise
    finally:
        cursor.close()
-
-
-
def add_category(cnx, database_type, podcast_id, user_id, category):
    """Append ``category`` to a podcast's comma-separated Categories field.

    No-op (but still committed and True) when the category already exists.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Podcast to modify.
        user_id: Owner of the podcast.
        category: Category name to add.

    Returns:
        True when the podcast exists and the update committed, False when
        no matching podcast is found or the row has an unexpected shape.
        Database errors are logged and re-raised.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = (
                'SELECT categories '
                'FROM "Podcasts" '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:  # For MySQL or MariaDB
            query = (
                "SELECT Categories "
                "FROM Podcasts "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(query, (podcast_id, user_id))
        result = cursor.fetchone()

        if not result:
            logging.warning("No matching podcast found.")
            # Redundant pre-return cursor.close() removed; finally handles it.
            return False

        # Rows may be dicts or tuples depending on the cursor's row factory.
        if isinstance(result, dict):
            categories_field = result.get('categories')
        elif isinstance(result, tuple):
            categories_field = result[0]
        else:
            logging.error(f"Unexpected result type: {type(result)}")
            # Fix: previously returned [] here, breaking the bool contract.
            return False

        # Categories are stored as a single ", "-separated string.
        categories = categories_field.split(', ') if categories_field else []

        # Add the new category only if it is not already present.
        if category not in categories:
            categories.append(category)

        updated_categories = ', '.join(categories)

        if database_type == "postgresql":
            update_query = (
                'UPDATE "Podcasts" '
                'SET "categories" = %s '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:
            update_query = (
                "UPDATE Podcasts "
                "SET Categories = %s "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(update_query, (updated_categories, podcast_id, user_id))
        cnx.commit()

        return True

    except Exception as e:
        logging.error(f"Error adding category: {e}")
        raise
    finally:
        cursor.close()
-
def remove_category(cnx, database_type, podcast_id, user_id, category):
    """Remove ``category`` from a podcast's comma-separated Categories field.

    No-op (but still committed) when the category is not present.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        podcast_id: Podcast to modify.
        user_id: Owner of the podcast.
        category: Category name to remove.

    Returns:
        None.  Missing podcasts and malformed rows are logged and ignored;
        database errors are logged and re-raised.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = (
                'SELECT categories '
                'FROM "Podcasts" '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:  # For MySQL or MariaDB
            query = (
                "SELECT Categories "
                "FROM Podcasts "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(query, (podcast_id, user_id))
        result = cursor.fetchone()
        # Leftover debug print removed.

        if not result:
            logging.warning("No matching podcast found.")
            # Redundant pre-return cursor.close() removed; finally handles it.
            return

        # Rows may be dicts or tuples depending on the cursor's row factory.
        if isinstance(result, dict):
            categories_field = result.get('categories')
        elif isinstance(result, tuple):
            categories_field = result[0]
        else:
            logging.error(f"Unexpected result type: {type(result)}")
            # Fix: previously returned [] from an otherwise None-returning
            # function; keep the None contract on this error path too.
            return

        # Categories are stored as a single ", "-separated string.
        categories = categories_field.split(', ') if categories_field else []

        # Remove the category only if it is actually present.
        if category in categories:
            categories.remove(category)

        updated_categories = ', '.join(categories)

        if database_type == "postgresql":
            update_query = (
                'UPDATE "Podcasts" '
                'SET "categories" = %s '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:
            update_query = (
                "UPDATE Podcasts "
                "SET Categories = %s "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(update_query, (updated_categories, podcast_id, user_id))
        cnx.commit()

    except Exception as e:
        logging.error(f"Error removing category: {e}")
        raise
    finally:
        cursor.close()
-
def update_feed_cutoff_days(cnx, database_type, podcast_id, user_id, feed_cutoff_days):
    """Set the feed cutoff (in days) for one of a user's podcasts.

    Returns True on success, False when the podcast does not exist or does
    not belong to the given user. Database errors are logged and re-raised.
    """
    cursor = cnx.cursor()
    try:
        is_postgres = database_type == "postgresql"

        # Confirm the podcast exists and is owned by this user before writing.
        if is_postgres:
            ownership_sql = (
                'SELECT "podcastid" '
                'FROM "Podcasts" '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:  # MySQL / MariaDB
            ownership_sql = (
                "SELECT PodcastID "
                "FROM Podcasts "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(ownership_sql, (podcast_id, user_id))
        if not cursor.fetchone():
            logging.warning("No matching podcast found or podcast does not belong to the user.")
            cursor.close()
            return False

        if is_postgres:
            update_sql = (
                'UPDATE "Podcasts" '
                'SET "feedcutoffdays" = %s '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:
            update_sql = (
                "UPDATE Podcasts "
                "SET FeedCutoffDays = %s "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(update_sql, (feed_cutoff_days, podcast_id, user_id))
        cnx.commit()
        return True

    except Exception as e:
        logging.error(f"Error updating feed cutoff days: {e}")
        raise
    finally:
        cursor.close()
-
def get_feed_cutoff_days(cnx, database_type, podcast_id, user_id):
    """Fetch the stored feed cutoff days for one of a user's podcasts.

    Returns the stored value, or None when no matching podcast exists or
    the fetched row has an unrecognized shape. Database errors are logged
    and re-raised.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            sql = (
                'SELECT "feedcutoffdays" '
                'FROM "Podcasts" '
                'WHERE "podcastid" = %s AND "userid" = %s'
            )
        else:  # MySQL / MariaDB
            sql = (
                "SELECT FeedCutoffDays "
                "FROM Podcasts "
                "WHERE PodcastID = %s AND UserID = %s"
            )
        cursor.execute(sql, (podcast_id, user_id))
        row = cursor.fetchone()

        if not row:
            logging.warning("No matching podcast found.")
            cursor.close()
            return None

        # Row may be a dict (dict-row cursor) or a plain tuple.
        if isinstance(row, dict):
            return row.get('feedcutoffdays')
        if isinstance(row, tuple):
            return row[0]

        logging.error(f"Unexpected result type: {type(row)}")
        return None

    except Exception as e:
        logging.error(f"Error getting feed cutoff days: {e}")
        raise
    finally:
        cursor.close()
-
-
-# In database_functions/functions.py
-#
# NOTE(review): this definition is shadowed by a later function of the same
# name (and same signature) further down in this module; at import time the
# later definition wins, so this one is dead code — consider removing it.
def send_ntfy_notification(topic: str, server_url: str, title: str, message: str):
    """Send a plain-text push notification to an ntfy topic.

    Falls back to the public https://ntfy.sh server when ``server_url`` is
    empty or None. Returns True on HTTP success, False on any error.
    """
    try:
        import requests

        # Default to ntfy.sh if no server URL provided
        base_url = server_url.rstrip('/') if server_url else "https://ntfy.sh"
        url = f"{base_url}/{topic}"

        headers = {
            "Title": title,
            "Content-Type": "text/plain"
        }

        # No timeout here — the later (shadowing) definition adds one.
        response = requests.post(url, headers=headers, data=message)
        response.raise_for_status()
        return True
    except Exception as e:
        logging.error(f"Error sending NTFY notification: {e}")
        return False
-
# NOTE(review): this definition is shadowed by a later function of the same
# name (and same signature) further down in this module; at import time the
# later definition wins, so this one is dead code — consider removing it.
def send_gotify_notification(server_url: str, token: str, title: str, message: str):
    """Send a notification through a Gotify server's /message endpoint.

    Authenticates with the app token via the X-Gotify-Key header.
    Returns True on HTTP success, False on any error.
    """
    try:
        import requests

        url = f"{server_url.rstrip('/')}/message"

        headers = {
            "X-Gotify-Key": token
        }

        data = {
            "title": title,
            "message": message,
            "priority": 5
        }

        # No timeout here — the later (shadowing) definition adds one.
        response = requests.post(url, headers=headers, json=data)
        response.raise_for_status()
        return True
    except Exception as e:
        logging.error(f"Error sending Gotify notification: {e}")
        return False
-
-# Base notification functions for actual episode notifications
def send_ntfy_notification(topic: str, server_url: str, title: str, message: str):
    """Send an episode notification to an ntfy topic.

    Uses the public https://ntfy.sh server when no server URL is configured.
    The request is capped at a 2-second timeout so a slow notification
    server cannot stall the caller. Returns True on success, False on
    timeout or any other error.
    """
    try:
        base = server_url.rstrip('/') if server_url else "https://ntfy.sh"
        target = f"{base}/{topic}"
        response = requests.post(
            target,
            headers={"Title": title, "Content-Type": "text/plain"},
            data=message,
            timeout=2,  # abort quickly rather than block the caller
        )
        response.raise_for_status()
        return True
    except requests.Timeout:
        logging.error(f"Timeout sending notification to {target}")
        return False
    except Exception as e:
        logging.error(f"Error sending NTFY notification: {e}")
        return False
-
def send_gotify_notification(server_url: str, token: str, title: str, message: str):
    """Send an episode notification through a Gotify server.

    Posts to the server's /message endpoint using the app token in the
    X-Gotify-Key header, with a 2-second timeout so a slow server cannot
    stall the caller. Returns True on success, False otherwise.
    """
    try:
        url = f"{server_url.rstrip('/')}/message"
        payload = {
            "title": title,
            "message": message,
            "priority": 5
        }
        response = requests.post(
            url,
            headers={"X-Gotify-Key": token},
            json=payload,
            timeout=2,  # abort quickly rather than block the caller
        )
        response.raise_for_status()
        return True
    except requests.Timeout:
        logging.error(f"Timeout sending notification to {url}")
        return False
    except Exception as e:
        logging.error(f"Error sending Gotify notification: {e}")
        return False
-
-# Test versions that specifically mention they're test notifications
def send_test_ntfy_notification(topic: str, server_url: str):
    """Fire a canned test notification at the user's ntfy topic."""
    return send_ntfy_notification(
        topic=topic,
        server_url=server_url,
        title="Pinepods Test Notification",
        message="This is a test notification from your Pinepods server!",
    )
-
def send_test_gotify_notification(server_url: str, token: str):
    """Fire a canned test notification at the user's Gotify server."""
    return send_gotify_notification(
        server_url=server_url,
        token=token,
        title="Pinepods Test Notification",
        message="This is a test notification from your Pinepods server!",
    )
-
def send_test_notification(cnx, database_type, user_id, platform):
    """Look up a user's enabled notification settings and send a test message.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_id: User whose settings are looked up.
        platform: 'ntfy' or 'gotify'; only an enabled row for this platform
            is used.

    Returns:
        True when the test notification was dispatched successfully, False
        when no enabled settings exist or sending failed.
    """
    cursor = cnx.cursor()
    # Defined before the try block so the except handler can log it safely;
    # previously a failure in cursor.execute() raised NameError here.
    settings = None
    try:
        if database_type == "postgresql":
            query = """
                SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken
                FROM "UserNotificationSettings"
                WHERE UserID = %s AND Platform = %s AND Enabled = TRUE
            """
        else:
            query = """
                SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken
                FROM UserNotificationSettings
                WHERE UserID = %s AND Platform = %s AND Enabled = TRUE
            """
        cursor.execute(query, (user_id, platform))
        settings = cursor.fetchone()
        if not settings:
            logging.error("No notification settings found")
            return False

        if isinstance(settings, dict):  # PostgreSQL dict-row case
            if platform == 'ntfy':
                return send_test_ntfy_notification(
                    topic=settings['ntfytopic'],          # lowercase keys from dict_row
                    server_url=settings['ntfyserverurl']
                )
            else:  # gotify
                return send_test_gotify_notification(
                    server_url=settings['gotifyurl'],
                    token=settings['gotifytoken']
                )
        else:  # MySQL or PostgreSQL tuple case
            if platform == 'ntfy':
                return send_test_ntfy_notification(
                    settings[2],  # NtfyTopic
                    settings[3]   # NtfyServerUrl
                )
            else:  # gotify
                return send_test_gotify_notification(
                    settings[4],  # GotifyUrl
                    settings[5]   # GotifyToken
                )
    except Exception as e:
        logging.error(f"Error sending test notification: {e}")
        logging.error(f"Settings object type: {type(settings)}")
        logging.error(f"Settings content: {settings}")
        return False
    finally:
        cursor.close()
-
def get_notification_settings(cnx, database_type, user_id):
    """Return every notification-settings row for a user as plain dicts.

    Each entry carries: platform, enabled, ntfy_topic, ntfy_server_url,
    gotify_url, gotify_token. Database errors are logged and re-raised.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = """
                SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken
                FROM "UserNotificationSettings"
                WHERE UserID = %s
            """
        else:  # MySQL
            query = """
                SELECT Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken
                FROM UserNotificationSettings
                WHERE UserID = %s
            """

        cursor.execute(query, (user_id,))
        rows = cursor.fetchall()

        def _normalize(row):
            # Dict rows come from psycopg's RealDictCursor (lowercase keys);
            # tuples come from plain cursors — map both to the API shape.
            if isinstance(row, dict):
                return {
                    "platform": row["platform"],
                    "enabled": bool(row["enabled"]),
                    "ntfy_topic": row["ntfytopic"],
                    "ntfy_server_url": row["ntfyserverurl"],
                    "gotify_url": row["gotifyurl"],
                    "gotify_token": row["gotifytoken"],
                }
            return {
                "platform": row[0],
                "enabled": bool(row[1]),
                "ntfy_topic": row[2],
                "ntfy_server_url": row[3],
                "gotify_url": row[4],
                "gotify_token": row[5],
            }

        return [_normalize(row) for row in rows]

    except Exception as e:
        logging.error(f"Error fetching notification settings: {e}")
        raise
    finally:
        cursor.close()
-
def update_notification_settings(cnx, database_type, user_id, platform, enabled, ntfy_topic=None,
                                 ntfy_server_url=None, gotify_url=None, gotify_token=None):
    """Create or update a user's notification settings for one platform.

    Performs an UPDATE when a (user, platform) row already exists, otherwise
    an INSERT.

    Returns:
        True on success.

    Raises:
        Exception: Database errors are logged, the transaction is rolled
            back, and the error is re-raised.
    """
    cursor = cnx.cursor()
    try:
        # First check if settings exist for this user and platform.
        if database_type == "postgresql":
            check_query = """
                SELECT 1 FROM "UserNotificationSettings"
                WHERE UserID = %s AND Platform = %s
            """
        else:
            check_query = """
                SELECT 1 FROM UserNotificationSettings
                WHERE UserID = %s AND Platform = %s
            """
        cursor.execute(check_query, (user_id, platform))
        exists = cursor.fetchone() is not None

        # Build each statement together with its own parameter tuple so the
        # two can never drift out of sync. (The previous version interleaved
        # both parameter orders in a single `x if exists else y` tuple, which
        # was correct but extremely easy to break.)
        if exists:
            if database_type == "postgresql":
                query = """
                    UPDATE "UserNotificationSettings"
                    SET Enabled = %s,
                        NtfyTopic = %s,
                        NtfyServerUrl = %s,
                        GotifyUrl = %s,
                        GotifyToken = %s
                    WHERE UserID = %s AND Platform = %s
                """
            else:
                query = """
                    UPDATE UserNotificationSettings
                    SET Enabled = %s,
                        NtfyTopic = %s,
                        NtfyServerUrl = %s,
                        GotifyUrl = %s,
                        GotifyToken = %s
                    WHERE UserID = %s AND Platform = %s
                """
            params = (enabled, ntfy_topic, ntfy_server_url,
                      gotify_url, gotify_token, user_id, platform)
        else:
            if database_type == "postgresql":
                query = """
                    INSERT INTO "UserNotificationSettings"
                    (UserID, Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken)
                    VALUES (%s, %s, %s, %s, %s, %s, %s)
                """
            else:
                query = """
                    INSERT INTO UserNotificationSettings
                    (UserID, Platform, Enabled, NtfyTopic, NtfyServerUrl, GotifyUrl, GotifyToken)
                    VALUES (%s, %s, %s, %s, %s, %s, %s)
                """
            params = (user_id, platform, enabled, ntfy_topic,
                      ntfy_server_url, gotify_url, gotify_token)

        cursor.execute(query, params)
        cnx.commit()
        return True

    except Exception as e:
        logging.error(f"Error updating notification settings: {e}")
        cnx.rollback()
        raise
    finally:
        cursor.close()
-
-
-
-
def increment_played(cnx, database_type, user_id):
    """Bump the user's PodcastsPlayed counter in UserStats by one."""
    if database_type == "postgresql":
        sql = 'UPDATE "UserStats" SET PodcastsPlayed = PodcastsPlayed + 1 WHERE UserID = %s'
    else:  # MySQL or MariaDB
        sql = "UPDATE UserStats SET PodcastsPlayed = PodcastsPlayed + 1 WHERE UserID = %s"
    cursor = cnx.cursor()
    cursor.execute(sql, (user_id,))
    cnx.commit()
    cursor.close()
-
def increment_listen_time(cnx, database_type, user_id):
    """Bump the user's TimeListened counter in UserStats by one."""
    if database_type == "postgresql":
        sql = ('UPDATE "UserStats" SET TimeListened = TimeListened + 1 '
               "WHERE UserID = %s")
    else:
        sql = ("UPDATE UserStats SET TimeListened = TimeListened + 1 "
               "WHERE UserID = %s")
    cursor = cnx.cursor()
    cursor.execute(sql, (user_id,))
    cnx.commit()
    cursor.close()
-
-
-
# NOTE(review): an identical definition of get_user_episode_count appears
# again immediately below; the later one silently wins at import time.
# Consider deleting one copy.
def get_user_episode_count(cnx, database_type, user_id):
    """Count all episodes across every podcast the user subscribes to."""
    cursor = cnx.cursor()
    if database_type == "postgresql":
        query = (
            'SELECT COUNT(*) '
            'FROM "Episodes" '
            'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID '
            'WHERE "Podcasts".UserID = %s'
        )
    else:  # MySQL or MariaDB
        query = (
            "SELECT COUNT(*) "
            "FROM Episodes "
            "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID "
            "WHERE Podcasts.UserID = %s"
        )

    cursor.execute(query, (user_id,))
    # Assumes a tuple-style row; a dict-row cursor would break the [0] index.
    episode_count = cursor.fetchone()[0]
    cursor.close()

    return episode_count
-
-
-
def get_user_episode_count(cnx, database_type, user_id):
    """Count all episodes across every podcast the user subscribes to."""
    if database_type == "postgresql":
        count_sql = (
            'SELECT COUNT(*) '
            'FROM "Episodes" '
            'INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID '
            'WHERE "Podcasts".UserID = %s'
        )
    else:  # MySQL or MariaDB
        count_sql = (
            "SELECT COUNT(*) "
            "FROM Episodes "
            "INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID "
            "WHERE Podcasts.UserID = %s"
        )

    cursor = cnx.cursor()
    cursor.execute(count_sql, (user_id,))
    row = cursor.fetchone()
    cursor.close()
    return row[0]
-
-
def check_podcast(cnx, database_type, user_id, podcast_name, podcast_url):
    """Return True if the user already has this podcast (matched by name and feed URL).

    Deliberately best-effort: any database failure reads as "not present".
    """
    cursor = None
    try:
        cursor = cnx.cursor()
        if database_type == "postgresql":
            sql = 'SELECT PodcastID FROM "Podcasts" WHERE UserID = %s AND PodcastName = %s AND FeedURL = %s'
        else:  # MySQL or MariaDB
            sql = "SELECT PodcastID FROM Podcasts WHERE UserID = %s AND PodcastName = %s AND FeedURL = %s"

        cursor.execute(sql, (user_id, podcast_name, podcast_url))
        found = cursor.fetchone()
        return found is not None
    except Exception:
        return False
    finally:
        if cursor:
            cursor.close()
-
def check_youtube_channel(cnx, database_type, user_id, channel_name, channel_url):
    """Return True if the user already follows this YouTube channel.

    Matches on name + feed URL with the IsYouTubeChannel flag set.
    Deliberately best-effort: any database failure reads as "not present".
    """
    cursor = None
    try:
        cursor = cnx.cursor()
        table = '"Podcasts"' if database_type == "postgresql" else "Podcasts"
        sql = f'''
            SELECT PodcastID
            FROM {table}
            WHERE UserID = %s
            AND PodcastName = %s
            AND FeedURL = %s
            AND IsYouTubeChannel = TRUE
        '''
        cursor.execute(sql, (user_id, channel_name, channel_url))
        return cursor.fetchone() is not None
    except Exception:
        return False
    finally:
        if cursor:
            cursor.close()
-
def check_youtube_channel_id(cnx, database_type, podcast_id):
    """Return True if the given podcast row is flagged as a YouTube channel.

    The query itself filters on IsYouTubeChannel = TRUE, so any returned row
    — dict, tuple, or otherwise — means the flag is set; a non-None fetch
    result is sufficient. (The previous version branched on the row type but
    returned True in every non-None case.)

    Best-effort: database errors are logged and reported as False.
    """
    cursor = None
    try:
        cursor = cnx.cursor()
        if database_type == "postgresql":
            query = '''
                SELECT IsYouTubeChannel
                FROM "Podcasts"
                WHERE PodcastID = %s
                AND IsYouTubeChannel = TRUE
            '''
        else:  # MySQL or MariaDB
            query = '''
                SELECT IsYouTubeChannel
                FROM Podcasts
                WHERE PodcastID = %s
                AND IsYouTubeChannel = TRUE
            '''
        cursor.execute(query, (podcast_id,))
        return cursor.fetchone() is not None
    except Exception as e:
        # Use the module logger rather than print() for consistency with the
        # rest of this module.
        logging.error(f"Error checking if YouTube channel: {e}")
        return False
    finally:
        if cursor:
            cursor.close()
-
def reset_password_create_code(cnx, database_type, user_email):
    """Generate and store a 6-character password-reset code for a user.

    The code is written to the user's row together with a one-hour expiry.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_email: Email address identifying the account.

    Returns:
        The generated code string on success, or False when no user has
        that email or the update fails.
    """
    # Local import: the secrets module gives a CSPRNG. random.choices() (used
    # previously) is predictable from PRNG state and unsuitable for
    # security-sensitive reset codes.
    import secrets

    alphabet = string.ascii_uppercase + string.digits
    reset_code = ''.join(secrets.choice(alphabet) for _ in range(6))
    cursor = cnx.cursor()

    # Check if a user with this email exists.
    if database_type == "postgresql":
        check_query = """
            SELECT UserID
            FROM "Users"
            WHERE Email = %s
        """
    else:
        check_query = """
            SELECT UserID
            FROM Users
            WHERE Email = %s
        """
    cursor.execute(check_query, (user_email,))
    if cursor.fetchone() is None:
        cursor.close()
        return False

    # User exists: store the code with a one-hour expiry.
    reset_expiry = datetime.datetime.now() + datetime.timedelta(hours=1)

    if database_type == "postgresql":
        update_query = """
            UPDATE "Users"
            SET Reset_Code = %s,
                Reset_Expiry = %s
            WHERE Email = %s
        """
    else:
        update_query = """
            UPDATE Users
            SET Reset_Code = %s,
                Reset_Expiry = %s
            WHERE Email = %s
        """
    params = (reset_code, reset_expiry.strftime('%Y-%m-%d %H:%M:%S'), user_email)
    try:
        cursor.execute(update_query, params)
        cnx.commit()
    except Exception as e:
        # Module logger instead of print() for consistency with this module.
        logging.error(f"Error when trying to update reset code: {e}")
        cursor.close()
        return False

    cursor.close()
    return reset_code
-
def reset_password_remove_code(cnx, database_type, email):
    """Clear any pending reset code and expiry for the given email.

    Returns:
        True when a user row was updated, False otherwise.

    The cursor is always closed — the previous version leaked it.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'UPDATE "Users" SET Reset_Code = NULL, Reset_Expiry = NULL WHERE Email = %s'
        else:
            query = "UPDATE Users SET Reset_Code = NULL, Reset_Expiry = NULL WHERE Email = %s"
        cursor.execute(query, (email,))
        cnx.commit()
        return cursor.rowcount > 0
    finally:
        cursor.close()
-
-
def verify_password(cnx, database_type, username: str, password: str) -> bool:
    """Check a username/password pair against the stored Argon2 hash.

    Returns False when the user is unknown or the password does not match.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        cursor.execute('SELECT Hashed_PW FROM "Users" WHERE Username = %s', (username,))
    else:
        cursor.execute("SELECT Hashed_PW FROM Users WHERE Username = %s", (username,))
    row = cursor.fetchone()
    cursor.close()

    if not row:
        # Unknown username.
        return False

    stored_hash = row[0]
    hasher = PasswordHasher()
    try:
        # Raises VerifyMismatchError on a wrong password.
        hasher.verify(stored_hash, password)
        # Password is correct. Argon2 can flag hashes whose parameters are
        # outdated; persisting the rehash is intentionally not implemented
        # yet (see TODO below).
        if hasher.check_needs_rehash(stored_hash):
            new_hash = hasher.hash(password)
            # TODO: persist new_hash, e.g. update_hashed_password(cnx, username, new_hash)
        return True
    except VerifyMismatchError:
        return False
-
-
def verify_reset_code(cnx, database_type, user_email, reset_code):
    """Validate a password-reset code for an email address.

    Returns None when no user has that email, True when the supplied code
    matches the stored one and has not expired, otherwise False.
    """
    cursor = cnx.cursor()
    if database_type == "postgresql":
        select_query = """
            SELECT Reset_Code, Reset_Expiry
            FROM "Users"
            WHERE Email = %s
        """
    else:
        select_query = """
            SELECT Reset_Code, Reset_Expiry
            FROM Users
            WHERE Email = %s
        """
    cursor.execute(select_query, (user_email,))
    row = cursor.fetchone()
    cursor.close()

    if row is None:
        # No account with this email.
        return None

    stored_code, expiry = row
    return stored_code == reset_code and datetime.datetime.now() < expiry
-
def check_reset_user(cnx, database_type, username, email):
    """Return True if a user exists with both this username and this email.

    Used to validate password-reset requests. The cursor is closed on all
    paths — the previous version leaked it.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT * FROM "Users" WHERE Username = %s AND Email = %s'
        else:
            query = "SELECT * FROM Users WHERE Username = %s AND Email = %s"
        cursor.execute(query, (username, email))
        return cursor.fetchone() is not None
    finally:
        cursor.close()
-
-
def reset_password_prompt(cnx, database_type, user_email, hashed_pw):
    """Replace a user's password hash and clear any pending reset code.

    Args:
        cnx: Open database connection.
        database_type: "postgresql" or a MySQL/MariaDB variant.
        user_email: Email address identifying the account.
        hashed_pw: New (already hashed) password to store.

    Returns:
        "Password Reset Successfully" when a row was updated, or None when
        no user matches the email (nothing is committed in that case).

    The cursor is closed on all paths — the previous version leaked it on
    the no-match early return.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            update_query = """
                UPDATE "Users"
                SET Hashed_PW = %s,
                    Reset_Code = NULL,
                    Reset_Expiry = NULL
                WHERE Email = %s
            """
        else:
            update_query = """
                UPDATE Users
                SET Hashed_PW = %s,
                    Reset_Code = NULL,
                    Reset_Expiry = NULL
                WHERE Email = %s
            """
        cursor.execute(update_query, (hashed_pw, user_email))

        if cursor.rowcount == 0:
            # No user with this email; do not commit.
            return None

        cnx.commit()
        return "Password Reset Successfully"
    finally:
        cursor.close()
-
def get_episode_metadata(database_type, cnx, episode_id, user_id, person_episode=False, is_youtube=False):
    """Fetch full metadata for one episode, YouTube video, or person-feed episode.

    Dispatches on database dialect (PostgreSQL with dict rows vs
    MySQL/MariaDB with dictionary cursors) and on the episode flavor:

    - is_youtube: `episode_id` is a YouTubeVideos.VideoID; video fields are
      aliased to the episode field names.
    - person_episode: `episode_id` is a PeopleEpisodes.EpisodeID; the row is
      matched to the real Episodes row by title + URL, then merged with the
      user's listen history.
    - otherwise: a regular Episodes row joined with its podcast, listen
      history, and queued/saved/downloaded flags.

    Regular and YouTube lookups are retried against UserID 1 when nothing is
    found for `user_id` (presumably a shared/system user — verify against
    callers). Results are normalized via lowercase_keys/convert_bools,
    defined elsewhere in this module.

    Raises:
        ValueError: when no matching episode/video row can be found.
    """
    if database_type == "postgresql":
        from psycopg.rows import dict_row
        # NOTE(review): mutates the connection's row factory for subsequent
        # cursors on this connection, not just this call.
        cnx.row_factory = dict_row
        cursor = cnx.cursor()

        if is_youtube:
            # Query for YouTube videos
            query_youtube = """
                SELECT "Podcasts".PodcastID, "Podcasts".PodcastIndexID, "Podcasts".FeedURL,
                    "Podcasts".PodcastName, "Podcasts".ArtworkURL,
                    "YouTubeVideos".VideoTitle as EpisodeTitle,
                    "YouTubeVideos".PublishedAt as EpisodePubDate,
                    "YouTubeVideos".VideoDescription as EpisodeDescription,
                    "YouTubeVideos".ThumbnailURL as EpisodeArtwork,
                    "YouTubeVideos".VideoURL as EpisodeURL,
                    "YouTubeVideos".Duration as EpisodeDuration,
                    "YouTubeVideos".VideoID as EpisodeID,
                    "YouTubeVideos".ListenPosition as ListenDuration,
                    "YouTubeVideos".Completed,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded,
                    TRUE::boolean as is_youtube
                FROM "YouTubeVideos"
                INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "EpisodeQueue" q ON "YouTubeVideos".VideoID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN "SavedEpisodes" s ON "YouTubeVideos".VideoID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN "DownloadedEpisodes" d ON "YouTubeVideos".VideoID = d.EpisodeID AND d.UserID = %s
                WHERE "YouTubeVideos".VideoID = %s AND "Podcasts".UserID = %s
            """
            cursor.execute(query_youtube, (user_id, user_id, user_id, episode_id, user_id))
            result = cursor.fetchone()

            # If not found, try with system user (1)
            if not result:
                cursor.execute(query_youtube, (user_id, user_id, user_id, episode_id, 1))
                result = cursor.fetchone()

        elif person_episode:
            # First get the episode from PeopleEpisodes and match with Episodes using title and URL
            query_people = """
                SELECT pe.*,
                    p.PodcastID, p.PodcastName, p.ArtworkURL as podcast_artwork,
                    p.FeedURL, p.WebsiteURL, p.PodcastIndexID,
                    e.EpisodeID as real_episode_id,
                    COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as final_artwork,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded,
                    FALSE::boolean as is_youtube
                FROM "PeopleEpisodes" pe
                JOIN "Podcasts" p ON pe.PodcastID = p.PodcastID
                JOIN "Episodes" e ON (
                    e.EpisodeTitle = pe.EpisodeTitle
                    AND e.EpisodeURL = pe.EpisodeURL
                )
                LEFT JOIN "EpisodeQueue" q ON e.EpisodeID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN "SavedEpisodes" s ON e.EpisodeID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN "DownloadedEpisodes" d ON e.EpisodeID = d.EpisodeID AND d.UserID = %s
                WHERE pe.EpisodeID = %s
            """
            cursor.execute(query_people, (user_id, user_id, user_id, episode_id))
            people_episode = cursor.fetchone()

            if not people_episode:
                raise ValueError(f"No people episode found with ID {episode_id}")

            # Now get additional data using the real episode ID
            query_history = """
                SELECT "UserEpisodeHistory".ListenDuration, "Episodes".Completed
                FROM "Episodes"
                LEFT JOIN "UserEpisodeHistory" ON
                    "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "UserEpisodeHistory".UserID = %s
                WHERE "Episodes".EpisodeID = %s
            """
            cursor.execute(query_history, (user_id, people_episode['real_episode_id']))
            history_data = cursor.fetchone() or {}

            # Combine the data
            result = {
                'episodetitle': people_episode['episodetitle'],
                'podcastname': people_episode['podcastname'],
                'podcastid': people_episode['podcastid'],
                'podcastindexid': people_episode['podcastindexid'],
                'feedurl': people_episode['feedurl'],
                'episodepubdate': people_episode['episodepubdate'].isoformat() if people_episode['episodepubdate'] else None,
                'episodedescription': people_episode['episodedescription'],
                'episodeartwork': people_episode['final_artwork'],
                'episodeurl': people_episode['episodeurl'],
                'episodeduration': people_episode['episodeduration'],
                'listenduration': history_data.get('listenduration'),
                'episodeid': people_episode['real_episode_id'],
                'completed': history_data.get('completed', False),
                'is_queued': people_episode['is_queued'],
                'is_saved': people_episode['is_saved'],
                'is_downloaded': people_episode['is_downloaded']
            }
        else:
            # Original query for regular episodes
            query = """
                SELECT "Podcasts".PodcastID, "Podcasts".PodcastIndexID, "Podcasts".FeedURL,
                    "Podcasts".PodcastName, "Podcasts".ArtworkURL, "Episodes".EpisodeTitle,
                    "Episodes".EpisodePubDate, "Episodes".EpisodeDescription,
                    "Episodes".EpisodeArtwork, "Episodes".EpisodeURL, "Episodes".EpisodeDuration,
                    "Episodes".EpisodeID, "Podcasts".WebsiteURL,
                    "UserEpisodeHistory".ListenDuration, "Episodes".Completed,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN true ELSE false END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN true ELSE false END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN true ELSE false END as is_downloaded,
                    FALSE::boolean as is_youtube
                FROM "Episodes"
                INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
                LEFT JOIN "UserEpisodeHistory" ON
                    "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
                    AND "Podcasts".UserID = "UserEpisodeHistory".UserID
                LEFT JOIN "EpisodeQueue" q ON "Episodes".EpisodeID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN "SavedEpisodes" s ON "Episodes".EpisodeID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN "DownloadedEpisodes" d ON "Episodes".EpisodeID = d.EpisodeID AND d.UserID = %s
                WHERE "Episodes".EpisodeID = %s AND "Podcasts".UserID = %s
            """
            cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id))
            result = cursor.fetchone()

            # If not found, try with system user (1)
            if not result:
                cursor.execute(query, (user_id, user_id, user_id, episode_id, 1))
                result = cursor.fetchone()

        cursor.close()

        if not result:
            raise ValueError(f"No episode found with ID {episode_id}" +
                             (" for person episode" if person_episode else f" for user {user_id}"))

        # Normalize key casing and boolean representations via module helpers.
        lower_row = lowercase_keys(result)
        bool_fix = convert_bools(lower_row, database_type)
        return bool_fix


    else:
        cursor = cnx.cursor(dictionary=True)
        if is_youtube:
            # MariaDB version of YouTube videos query
            query = """
                SELECT Podcasts.PodcastID, Podcasts.PodcastIndexID, Podcasts.FeedURL,
                    Podcasts.PodcastName, Podcasts.ArtworkURL,
                    YouTubeVideos.VideoTitle as EpisodeTitle,
                    YouTubeVideos.PublishedAt as EpisodePubDate,
                    YouTubeVideos.VideoDescription as EpisodeDescription,
                    YouTubeVideos.ThumbnailURL as EpisodeArtwork,
                    YouTubeVideos.VideoURL as EpisodeURL,
                    YouTubeVideos.Duration as EpisodeDuration,
                    YouTubeVideos.VideoID as EpisodeID,
                    YouTubeVideos.ListenPosition as ListenDuration,
                    YouTubeVideos.Completed,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded,
                    1 as is_youtube
                FROM YouTubeVideos
                INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
                LEFT JOIN EpisodeQueue q ON YouTubeVideos.VideoID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN SavedEpisodes s ON YouTubeVideos.VideoID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN DownloadedEpisodes d ON YouTubeVideos.VideoID = d.EpisodeID AND d.UserID = %s
                WHERE YouTubeVideos.VideoID = %s AND Podcasts.UserID = %s
            """
            # NOTE(review): unlike the other branches, this one does not
            # retry with UserID 1 — confirm whether that is intentional.
            cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id))
            result = cursor.fetchone()
        elif person_episode:
            # MariaDB version of people episodes query
            query_people = """
                SELECT pe.*,
                    p.PodcastID, p.PodcastName, p.ArtworkURL as podcast_artwork,
                    p.FeedURL, p.WebsiteURL, p.PodcastIndexID,
                    e.EpisodeID as real_episode_id,
                    COALESCE(pe.EpisodeArtwork, p.ArtworkURL) as final_artwork,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded,
                    FALSE as is_youtube
                FROM PeopleEpisodes pe
                JOIN Podcasts p ON pe.PodcastID = p.PodcastID
                JOIN Episodes e ON (
                    e.EpisodeTitle = pe.EpisodeTitle
                    AND e.EpisodeURL = pe.EpisodeURL
                )
                LEFT JOIN EpisodeQueue q ON e.EpisodeID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN SavedEpisodes s ON e.EpisodeID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN DownloadedEpisodes d ON e.EpisodeID = d.EpisodeID AND d.UserID = %s
                WHERE pe.EpisodeID = %s
            """
            cursor.execute(query_people, (user_id, user_id, user_id, episode_id))
            people_episode = cursor.fetchone()

            if not people_episode:
                raise ValueError(f"No people episode found with ID {episode_id}")

            # Get additional data using the real episode ID
            query_history = """
                SELECT UserEpisodeHistory.ListenDuration, Episodes.Completed
                FROM Episodes
                LEFT JOIN UserEpisodeHistory ON
                    Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND UserEpisodeHistory.UserID = %s
                WHERE Episodes.EpisodeID = %s
            """
            cursor.execute(query_history, (user_id, people_episode['real_episode_id']))
            history_data = cursor.fetchone() or {}

            # Combine the data
            result = {
                'episodetitle': people_episode['episodetitle'],
                'podcastname': people_episode['podcastname'],
                'podcastid': people_episode['podcastid'],
                'podcastindexid': people_episode['podcastindexid'],
                'feedurl': people_episode['feedurl'],
                'episodepubdate': people_episode['episodepubdate'].isoformat() if people_episode['episodepubdate'] else None,
                'episodedescription': people_episode['episodedescription'],
                'episodeartwork': people_episode['final_artwork'],
                'episodeurl': people_episode['episodeurl'],
                'episodeduration': people_episode['episodeduration'],
                'listenduration': history_data.get('listenduration'),
                'episodeid': people_episode['real_episode_id'],
                'completed': bool(history_data.get('completed', 0)),
                'is_queued': bool(people_episode['is_queued']),
                'is_saved': bool(people_episode['is_saved']),
                'is_downloaded': bool(people_episode['is_downloaded'])
            }
        else:
            # MariaDB version of regular episodes query
            query = """
                SELECT Podcasts.PodcastID, Podcasts.PodcastIndexID, Podcasts.FeedURL,
                    Podcasts.PodcastName, Podcasts.ArtworkURL, Episodes.EpisodeTitle,
                    Episodes.EpisodePubDate, Episodes.EpisodeDescription,
                    Episodes.EpisodeArtwork, Episodes.EpisodeURL, Episodes.EpisodeDuration,
                    Episodes.EpisodeID, Podcasts.WebsiteURL,
                    UserEpisodeHistory.ListenDuration, Episodes.Completed,
                    CASE WHEN q.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_queued,
                    CASE WHEN s.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_saved,
                    CASE WHEN d.EpisodeID IS NOT NULL THEN 1 ELSE 0 END as is_downloaded,
                    FALSE as is_youtube
                FROM Episodes
                INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
                LEFT JOIN UserEpisodeHistory ON
                    Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
                    AND Podcasts.UserID = UserEpisodeHistory.UserID
                LEFT JOIN EpisodeQueue q ON Episodes.EpisodeID = q.EpisodeID AND q.UserID = %s
                LEFT JOIN SavedEpisodes s ON Episodes.EpisodeID = s.EpisodeID AND s.UserID = %s
                LEFT JOIN DownloadedEpisodes d ON Episodes.EpisodeID = d.EpisodeID AND d.UserID = %s
                WHERE Episodes.EpisodeID = %s AND Podcasts.UserID = %s
            """
            cursor.execute(query, (user_id, user_id, user_id, episode_id, user_id))
            result = cursor.fetchone()

            # If not found, try with system user (1)
            if not result:
                cursor.execute(query, (user_id, user_id, user_id, episode_id, 1))
                result = cursor.fetchone()

        cursor.close()

        if not result:
            raise ValueError(f"No episode found with ID {episode_id}" +
                             (" for person episode" if person_episode else f" for user {user_id}"))

        # Convert boolean fields for MariaDB (stored as 0/1 integers)
        if result:
            result['completed'] = bool(result.get('completed', 0))
            result['is_queued'] = bool(result.get('is_queued', 0))
            result['is_saved'] = bool(result.get('is_saved', 0))
            result['is_downloaded'] = bool(result.get('is_downloaded', 0))
            result['is_youtube'] = bool(result.get('is_youtube', 0))

            # Format date if present
            if result.get('episodepubdate'):
                result['episodepubdate'] = result['episodepubdate'].isoformat()

        lower_row = lowercase_keys(result)
        bool_fix = convert_bools(lower_row, database_type)
        return bool_fix
-
-
-def get_episode_metadata_id(database_type, cnx, episode_id):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = """
- SELECT * FROM (
- SELECT
- "Podcasts".PodcastID,
- "Podcasts".FeedURL,
- "Podcasts".PodcastName,
- "Podcasts".ArtworkURL,
- "Episodes".EpisodeTitle,
- "Episodes".EpisodePubDate,
- "Episodes".EpisodeDescription,
- "Episodes".EpisodeArtwork,
- "Episodes".EpisodeURL,
- "Episodes".EpisodeDuration,
- "Episodes".EpisodeID,
- "Podcasts".WebsiteURL,
- "UserEpisodeHistory".ListenDuration,
- "Episodes".Completed,
- FALSE::boolean as is_youtube
- FROM "Episodes"
- INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "UserEpisodeHistory" ON
- "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
- AND "Podcasts".UserID = "UserEpisodeHistory".UserID
- WHERE "Episodes".EpisodeID = %s
-
- UNION ALL
-
- SELECT
- "Podcasts".PodcastID,
- "Podcasts".FeedURL,
- "Podcasts".PodcastName,
- "Podcasts".ArtworkURL,
- "YouTubeVideos".VideoTitle as EpisodeTitle,
- "YouTubeVideos".PublishedAt as EpisodePubDate,
- "YouTubeVideos".VideoDescription as EpisodeDescription,
- "YouTubeVideos".ThumbnailURL as EpisodeArtwork,
- "YouTubeVideos".VideoURL as EpisodeURL,
- "YouTubeVideos".Duration as EpisodeDuration,
- "YouTubeVideos".VideoID as EpisodeID,
- "Podcasts".WebsiteURL,
- "YouTubeVideos".ListenPosition as ListenDuration,
- "YouTubeVideos".Completed,
- TRUE::boolean as is_youtube
- FROM "YouTubeVideos"
- INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
- WHERE "YouTubeVideos".VideoID = %s
- ) combined
- LIMIT 1
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = """
- SELECT * FROM (
- SELECT
- Podcasts.PodcastID,
- Podcasts.FeedURL,
- Podcasts.PodcastName,
- Podcasts.ArtworkURL,
- Episodes.EpisodeTitle,
- Episodes.EpisodePubDate,
- Episodes.EpisodeDescription,
- Episodes.EpisodeArtwork,
- Episodes.EpisodeURL,
- Episodes.EpisodeDuration,
- Episodes.EpisodeID,
- Podcasts.WebsiteURL,
- UserEpisodeHistory.ListenDuration,
- Episodes.Completed,
- FALSE as is_youtube
- FROM Episodes
- INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- LEFT JOIN UserEpisodeHistory ON
- Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
- AND Podcasts.UserID = UserEpisodeHistory.UserID
- WHERE Episodes.EpisodeID = %s
-
- UNION ALL
-
- SELECT
- Podcasts.PodcastID,
- Podcasts.FeedURL,
- Podcasts.PodcastName,
- Podcasts.ArtworkURL,
- YouTubeVideos.VideoTitle as EpisodeTitle,
- YouTubeVideos.PublishedAt as EpisodePubDate,
- YouTubeVideos.VideoDescription as EpisodeDescription,
- YouTubeVideos.ThumbnailURL as EpisodeArtwork,
- YouTubeVideos.VideoURL as EpisodeURL,
- YouTubeVideos.Duration as EpisodeDuration,
- YouTubeVideos.VideoID as EpisodeID,
- Podcasts.WebsiteURL,
- YouTubeVideos.ListenPosition as ListenDuration,
- YouTubeVideos.Completed,
- TRUE as is_youtube
- FROM YouTubeVideos
- INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
- WHERE YouTubeVideos.VideoID = %s
- ) combined
- LIMIT 1
- """
-
- cursor.execute(query, (episode_id, episode_id))
- row = cursor.fetchone()
- cursor.close()
-
- if not row:
- raise ValueError(f"No episode found with ID {episode_id}")
-
- lower_row = lowercase_keys(row)
- bool_fix = convert_bools(lower_row, database_type)
- return bool_fix
-
-
-
-import logging
-
-def save_mfa_secret(database_type, cnx, user_id, mfa_secret):
- if database_type == "postgresql":
- cursor = cnx.cursor()
- query = 'UPDATE "Users" SET MFA_Secret = %s WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "UPDATE Users SET MFA_Secret = %s WHERE UserID = %s"
-
- try:
- cursor.execute(query, (mfa_secret, user_id))
- cnx.commit()
- cursor.close()
- logging.info(f"Successfully saved MFA secret for user")
- return True
- except Exception as e:
- logging.error(f"Error saving MFA secret for user")
- return False
-
-
-def check_mfa_enabled(database_type, cnx, user_id):
- if database_type == "postgresql":
- cursor = cnx.cursor()
- query = 'SELECT MFA_Secret FROM "Users" WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "SELECT MFA_Secret FROM Users WHERE UserID = %s"
-
- try:
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if result is None:
- return False
-
- # For PostgreSQL, the column name will be 'mfa_secret' in lowercase
- # For MySQL, the column name might be 'MFA_Secret' so we access it using lowercase
- if database_type != "postgresql":
- result = {k.lower(): v for k, v in result.items()}
-
- mfa_secret = result[0] if isinstance(result, tuple) else result.get('mfa_secret')
- return bool(mfa_secret)
- except Exception as e:
- print("Error checking MFA status:", e)
- return False
-
-
-
-
-def get_mfa_secret(database_type, cnx, user_id):
- if database_type == "postgresql":
- cursor = cnx.cursor()
- query = 'SELECT MFA_Secret FROM "Users" WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "SELECT MFA_Secret FROM Users WHERE UserID = %s"
-
- try:
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if isinstance(result, tuple):
- # Convert result to dictionary format for consistency
- result = dict(zip([desc[0] for desc in cursor.description], result))
-
- if isinstance(result, dict):
- if database_type == 'postgresql':
- return result.get('mfa_secret')
- else:
- return result.get('MFA_Secret')
- else:
- print("Unexpected result format:", result)
- return None
- except Exception as e:
- print("Error retrieving MFA secret:", e)
- return None
-
-
-def delete_mfa_secret(database_type, cnx, user_id):
- if database_type == "postgresql":
- cursor = cnx.cursor()
- query = 'UPDATE "Users" SET MFA_Secret = NULL WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "UPDATE Users SET MFA_Secret = NULL WHERE UserID = %s"
-
- try:
- cursor.execute(query, (user_id,))
- cnx.commit()
- cursor.close()
- return True
- except Exception as e:
- print("Error deleting MFA secret:", e)
- return False
-
-def setup_timezone_info(database_type, cnx, user_id, timezone, hour_pref, date_format):
- if database_type == "postgresql":
- cursor = cnx.cursor()
- query = (
- 'UPDATE "Users" SET Timezone = %s, TimeFormat = %s, DateFormat = %s, FirstLogin = %s WHERE UserID = %s'
- )
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = (
- "UPDATE Users SET Timezone = %s, TimeFormat = %s, DateFormat = %s, FirstLogin = %s WHERE UserID = %s"
- )
-
- try:
- if database_type == "postgresql":
- cursor.execute(query, (timezone, hour_pref, date_format, True, user_id))
- else:
- cursor.execute(query, (timezone, hour_pref, date_format, 1, user_id))
- cnx.commit()
- cursor.close()
-
- return True
- except Exception as e:
- print("Error setting up time info:", e)
- return False
-
-
-
-def get_time_info(database_type, cnx, user_id):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = 'SELECT Timezone, TimeFormat, DateFormat FROM "Users" WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "SELECT Timezone, TimeFormat, DateFormat FROM Users WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if result:
- # Check if result is a dict or tuple
- if isinstance(result, dict):
- # Handle both postgres (lowercase) and mysql (uppercase) dict keys
- timezone = result.get('timezone') or result.get('Timezone')
- timeformat = result.get('timeformat') or result.get('TimeFormat')
- dateformat = result.get('dateformat') or result.get('DateFormat')
- else:
- # Handle tuple result (order should match SELECT query)
- timezone, timeformat, dateformat = result
-
- return timezone, timeformat, dateformat
- else:
- return None, None, None
-
-
-def first_login_done(database_type, cnx, user_id):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = 'SELECT FirstLogin FROM "Users" WHERE UserID = %s'
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = "SELECT FirstLogin FROM Users WHERE UserID = %s"
-
- try:
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if database_type == "postgresql":
-
- first_login = result[0] if isinstance(result, tuple) else result['firstlogin']
- else:
- first_login = result[0] if isinstance(result, tuple) else result['FirstLogin']
- return first_login == 1
- except Exception as e:
- print("Error fetching first login status:", e)
- return False
-
-
-
-def delete_selected_episodes(cnx, database_type, selected_episodes, user_id):
- cursor = cnx.cursor()
- for episode_id in selected_episodes:
- # Get the download ID and location from the DownloadedEpisodes table
- query = (
- 'SELECT DownloadID, DownloadedLocation '
- 'FROM "DownloadedEpisodes" '
- 'WHERE EpisodeID = %s AND UserID = %s' if database_type == "postgresql" else
- "SELECT DownloadID, DownloadedLocation "
- "FROM DownloadedEpisodes "
- "WHERE EpisodeID = %s AND UserID = %s"
- )
- cursor.execute(query, (episode_id, user_id))
- result = cursor.fetchone()
-
- if not result:
- print(f"No matching download found for episode ID {episode_id}")
- continue
-
- download_id, downloaded_location = result
-
- # Delete the downloaded file
- os.remove(downloaded_location)
-
- # Remove the entry from the DownloadedEpisodes table
- query = (
- 'DELETE FROM "DownloadedEpisodes" WHERE DownloadID = %s' if database_type == "postgresql" else
- "DELETE FROM DownloadedEpisodes WHERE DownloadID = %s"
- )
- cursor.execute(query, (download_id,))
- cnx.commit()
- print(f"Removed {cursor.rowcount} entry from the DownloadedEpisodes table.")
-
- # Update UserStats table to decrement EpisodesDownloaded count
- query = (
- 'UPDATE "UserStats" SET EpisodesDownloaded = EpisodesDownloaded - 1 '
- 'WHERE UserID = %s' if database_type == "postgresql" else
- "UPDATE UserStats SET EpisodesDownloaded = EpisodesDownloaded - 1 WHERE UserID = %s"
- )
- cursor.execute(query, (user_id,))
-
- cursor.close()
-
- return "success"
-
-def search_data(database_type, cnx, search_term, user_id):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = """
- SELECT
- p.PodcastID as podcastid,
- p.PodcastName as podcastname,
- p.ArtworkURL as artworkurl,
- p.Author as author,
- p.Categories as categories,
- p.Description as description,
- p.EpisodeCount as episodecount,
- p.FeedURL as feedurl,
- p.WebsiteURL as websiteurl,
- p.Explicit as explicit,
- p.UserID as userid,
- p.IsYouTubeChannel as is_youtube,
- COALESCE(e.EpisodeID, y.VideoID) as episodeid,
- COALESCE(e.EpisodeTitle, y.VideoTitle) as episodetitle,
- COALESCE(e.EpisodeDescription, y.VideoDescription) as episodedescription,
- COALESCE(e.EpisodeURL, y.VideoURL) as episodeurl,
- COALESCE(e.EpisodeArtwork, y.ThumbnailURL) as episodeartwork,
- COALESCE(e.EpisodePubDate, y.PublishedAt) as episodepubdate,
- COALESCE(e.EpisodeDuration, y.Duration) as episodeduration,
- CASE
- WHEN y.VideoID IS NOT NULL THEN y.ListenPosition
- ELSE h.ListenDuration
- END as listenduration,
- COALESCE(e.Completed, y.Completed) as completed,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN sv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END
- ELSE
- CASE WHEN se.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END
- END as saved,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = TRUE THEN TRUE ELSE FALSE END
- ELSE
- CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = FALSE THEN TRUE ELSE FALSE END
- END as queued,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN dv.VideoID IS NOT NULL THEN TRUE ELSE FALSE END
- ELSE
- CASE WHEN de.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END
- END as downloaded
- FROM "Podcasts" p
- LEFT JOIN (
- SELECT * FROM "Episodes" WHERE EpisodeTitle ILIKE %s OR EpisodeDescription ILIKE %s
- ) e ON p.PodcastID = e.PodcastID
- LEFT JOIN (
- SELECT * FROM "YouTubeVideos" WHERE VideoTitle ILIKE %s OR VideoDescription ILIKE %s
- ) y ON p.PodcastID = y.PodcastID
- LEFT JOIN "UserEpisodeHistory" h ON
- (e.EpisodeID = h.EpisodeID AND h.UserID = %s)
- LEFT JOIN "SavedEpisodes" se ON
- (e.EpisodeID = se.EpisodeID AND se.UserID = %s)
- LEFT JOIN "SavedVideos" sv ON
- (y.VideoID = sv.VideoID AND sv.UserID = %s)
- LEFT JOIN "EpisodeQueue" eq ON
- ((e.EpisodeID = eq.EpisodeID OR y.VideoID = eq.EpisodeID) AND eq.UserID = %s)
- LEFT JOIN "DownloadedEpisodes" de ON
- (e.EpisodeID = de.EpisodeID AND de.UserID = %s)
- LEFT JOIN "DownloadedVideos" dv ON
- (y.VideoID = dv.VideoID AND dv.UserID = %s)
- WHERE p.UserID = %s
- AND (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = """
- SELECT
- p.PodcastID as podcastid,
- p.PodcastName as podcastname,
- p.ArtworkURL as artworkurl,
- p.Author as author,
- p.Categories as categories,
- p.Description as description,
- p.EpisodeCount as episodecount,
- p.FeedURL as feedurl,
- p.WebsiteURL as websiteurl,
- p.Explicit as explicit,
- p.UserID as userid,
- p.IsYouTubeChannel as is_youtube,
- COALESCE(e.EpisodeID, y.VideoID) as episodeid,
- COALESCE(e.EpisodeTitle, y.VideoTitle) as episodetitle,
- COALESCE(e.EpisodeDescription, y.VideoDescription) as episodedescription,
- COALESCE(e.EpisodeURL, y.VideoURL) as episodeurl,
- COALESCE(e.EpisodeArtwork, y.ThumbnailURL) as episodeartwork,
- COALESCE(e.EpisodePubDate, y.PublishedAt) as episodepubdate,
- COALESCE(e.EpisodeDuration, y.Duration) as episodeduration,
- CASE
- WHEN y.VideoID IS NOT NULL THEN y.ListenPosition
- ELSE h.ListenDuration
- END as listenduration,
- COALESCE(e.Completed, y.Completed) as completed,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN sv.VideoID IS NOT NULL THEN 1 ELSE 0 END
- ELSE
- CASE WHEN se.EpisodeID IS NOT NULL THEN 1 ELSE 0 END
- END as saved,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = 1 THEN 1 ELSE 0 END
- ELSE
- CASE WHEN eq.EpisodeID IS NOT NULL AND eq.is_youtube = 0 THEN 1 ELSE 0 END
- END as queued,
- CASE
- WHEN y.VideoID IS NOT NULL THEN
- CASE WHEN dv.VideoID IS NOT NULL THEN 1 ELSE 0 END
- ELSE
- CASE WHEN de.EpisodeID IS NOT NULL THEN 1 ELSE 0 END
- END as downloaded
- FROM Podcasts p
- LEFT JOIN (
- SELECT * FROM Episodes WHERE EpisodeTitle LIKE %s OR EpisodeDescription LIKE %s
- ) e ON p.PodcastID = e.PodcastID
- LEFT JOIN (
- SELECT * FROM YouTubeVideos WHERE VideoTitle LIKE %s OR VideoDescription LIKE %s
- ) y ON p.PodcastID = y.PodcastID
- LEFT JOIN UserEpisodeHistory h ON
- (e.EpisodeID = h.EpisodeID AND h.UserID = %s)
- LEFT JOIN SavedEpisodes se ON
- (e.EpisodeID = se.EpisodeID AND se.UserID = %s)
- LEFT JOIN SavedVideos sv ON
- (y.VideoID = sv.VideoID AND sv.UserID = %s)
- LEFT JOIN EpisodeQueue eq ON
- ((e.EpisodeID = eq.EpisodeID OR y.VideoID = eq.EpisodeID) AND eq.UserID = %s)
- LEFT JOIN DownloadedEpisodes de ON
- (e.EpisodeID = de.EpisodeID AND de.UserID = %s)
- LEFT JOIN DownloadedVideos dv ON
- (y.VideoID = dv.VideoID AND dv.UserID = %s)
- WHERE p.UserID = %s
- AND (e.EpisodeID IS NOT NULL OR y.VideoID IS NOT NULL)
- """
-
- # Add wildcards for the LIKE/ILIKE clause
- search_term = f"%{search_term}%"
-
- # We now need 11 parameters: search_term (4x), user_id (7x)
- params = (search_term, search_term, search_term, search_term, user_id, user_id, user_id, user_id, user_id, user_id, user_id)
-
- try:
- start = time.time()
- cursor.execute(query, params)
- result = cursor.fetchall()
- end = time.time()
- logging.info(f"Query executed in {end - start} seconds.")
- cursor.close()
-
- if not result:
- return []
-
- # Convert column names to lowercase for MySQL
- result = lowercase_keys(result)
-
- # Post-process the results to cast boolean to integer for the 'explicit' field
- if database_type == "postgresql":
- for row in result:
- if 'explicit' in row:
- row['explicit'] = 1 if row['explicit'] else 0
-
- if database_type != "postgresql":
- bool_fields = ['is_youtube', 'completed', 'saved', 'queued', 'downloaded']
- for row in result:
- for field in bool_fields:
- if field in row:
- row[field] = bool(row.get(field, 0))
-
- return result
-
- except Exception as e:
- logging.error(f"Error retrieving Podcast Episodes: {e}")
- return None
-
-
-def queue_pod(database_type, cnx, episode_id, user_id, is_youtube=False):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query_get_max_pos = (
- 'SELECT MAX(QueuePosition) AS max_pos FROM "EpisodeQueue" '
- 'WHERE UserID = %s'
- )
- else:
- cursor = cnx.cursor(dictionary=True)
- query_get_max_pos = (
- "SELECT MAX(QueuePosition) AS max_pos FROM EpisodeQueue "
- "WHERE UserID = %s"
- )
-
- cursor.execute(query_get_max_pos, (user_id,))
- result = cursor.fetchone()
- max_pos = result['max_pos'] if result['max_pos'] else 0
-
- # Insert the new item into the queue
- query_queue_pod = (
- 'INSERT INTO "EpisodeQueue"(UserID, EpisodeID, QueuePosition, is_youtube) '
- 'VALUES (%s, %s, %s, %s)' if database_type == "postgresql" else
- "INSERT INTO EpisodeQueue(UserID, EpisodeID, QueuePosition, is_youtube) "
- "VALUES (%s, %s, %s, %s)"
- )
-
- new_pos = max_pos + 1
- try:
- start = time.time()
- cursor.execute(query_queue_pod, (user_id, episode_id, new_pos, is_youtube))
- cnx.commit()
- end = time.time()
- print(f"Query executed in {end - start} seconds.")
- except Exception as e:
- print(f"Error queueing {'video' if is_youtube else 'episode'}:", e)
- return None
- return f"{'Video' if is_youtube else 'Episode'} queued successfully."
-
-def reorder_queued_episodes(database_type, cnx, user_id, episode_ids):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query_update_position = (
- 'UPDATE "EpisodeQueue" SET QueuePosition = %s '
- 'WHERE UserID = %s AND EpisodeID = %s'
- )
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query_update_position = (
- "UPDATE EpisodeQueue SET QueuePosition = %s "
- "WHERE UserID = %s AND EpisodeID = %s"
- )
-
- try:
- start = time.time()
-
- # Update the position of each episode in the order they appear in the list
- for position, episode_id in enumerate(episode_ids, start=1):
- cursor.execute(query_update_position, (position, user_id, episode_id))
-
- cnx.commit() # Commit the changes
- end = time.time()
- print(f"Query executed in {end - start} seconds.")
- return True
- except Exception as e:
- print("Error reordering Podcast Episodes:", e)
- return False
-
-
-
-def check_queued(database_type, cnx, episode_id, user_id, is_youtube=False):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- query = """
- SELECT * FROM "EpisodeQueue"
- WHERE EpisodeID = %s AND UserID = %s AND is_youtube = %s
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- query = """
- SELECT * FROM EpisodeQueue
- WHERE EpisodeID = %s AND UserID = %s AND is_youtube = %s
- """
- cursor.execute(query, (episode_id, user_id, is_youtube))
- result = cursor.fetchone()
- cursor.close()
- return True if result else False
-
-def get_queue_value(result, key, default=None):
- """
- Helper function to extract value from result set.
- It handles both dictionaries and tuples.
- """
- key_lower = key.lower()
- if isinstance(result, dict):
- return result.get(key_lower, default)
- elif isinstance(result, tuple):
- # Define a mapping of field names to their tuple indices for your specific queries
- key_map = {
- "episodeid": 0,
- "queueposition": 1
- }
- index = key_map.get(key_lower)
- return result[index] if index is not None else default
- return default
-
-
-def remove_queued_pod(database_type, cnx, episode_id, user_id, is_youtube=False):
- print(f'ep id: {episode_id}')
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- if is_youtube:
- get_queue_data_query = """
- SELECT "EpisodeQueue".EpisodeID, "EpisodeQueue".QueuePosition
- FROM "EpisodeQueue"
- INNER JOIN "YouTubeVideos" ON "EpisodeQueue".EpisodeID = "YouTubeVideos".VideoID
- WHERE "YouTubeVideos".VideoID = %s AND "EpisodeQueue".UserID = %s AND "EpisodeQueue".is_youtube = TRUE
- """
- else:
- get_queue_data_query = """
- SELECT "EpisodeQueue".EpisodeID, "EpisodeQueue".QueuePosition
- FROM "EpisodeQueue"
- INNER JOIN "Episodes" ON "EpisodeQueue".EpisodeID = "Episodes".EpisodeID
- WHERE "Episodes".EpisodeID = %s AND "EpisodeQueue".UserID = %s AND "EpisodeQueue".is_youtube = FALSE
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- if is_youtube:
- get_queue_data_query = """
- SELECT EpisodeQueue.EpisodeID, EpisodeQueue.QueuePosition
- FROM EpisodeQueue
- INNER JOIN YouTubeVideos ON EpisodeQueue.EpisodeID = YouTubeVideos.VideoID
- WHERE YouTubeVideos.VideoID = %s AND EpisodeQueue.UserID = %s AND EpisodeQueue.is_youtube = TRUE
- """
- else:
- get_queue_data_query = """
- SELECT EpisodeQueue.EpisodeID, EpisodeQueue.QueuePosition
- FROM EpisodeQueue
- INNER JOIN Episodes ON EpisodeQueue.EpisodeID = Episodes.EpisodeID
- WHERE Episodes.EpisodeID = %s AND EpisodeQueue.UserID = %s AND EpisodeQueue.is_youtube = FALSE
- """
-
- cursor.execute(get_queue_data_query, (episode_id, user_id))
- queue_data = cursor.fetchone()
- print(f"Queue data: {queue_data}")
-
- if queue_data is None:
- print(f"No queued {'video' if is_youtube else 'episode'} found with ID {episode_id}")
- cursor.close()
- return None
-
- removed_queue_position = queue_data['queueposition'] if database_type == "postgresql" else queue_data['QueuePosition']
- print(f'delete on the way')
-
- delete_query = (
- 'DELETE FROM "EpisodeQueue" WHERE UserID = %s AND EpisodeID = %s AND is_youtube = %s' if database_type == "postgresql" else
- "DELETE FROM EpisodeQueue WHERE UserID = %s AND EpisodeID = %s AND is_youtube = %s"
- )
- cursor.execute(delete_query, (user_id, episode_id, is_youtube))
- affected_rows = cursor.rowcount
- print(f'Rows affected by delete: {affected_rows}')
-
- if affected_rows == 0:
- print(f"No rows were deleted. UserID: {user_id}, {'VideoID' if is_youtube else 'EpisodeID'}: {episode_id}")
- return {"status": "error", "message": "No matching row found for deletion"}
-
- print(f'{"video" if is_youtube else "episode"} deleted')
- cnx.commit()
-
- update_queue_query = (
- 'UPDATE "EpisodeQueue" SET QueuePosition = QueuePosition - 1 WHERE UserID = %s AND QueuePosition > %s AND is_youtube = %s' if database_type == "postgresql" else
- "UPDATE EpisodeQueue SET QueuePosition = QueuePosition - 1 WHERE UserID = %s AND QueuePosition > %s AND is_youtube = %s"
- )
- cursor.execute(update_queue_query, (user_id, removed_queue_position, is_youtube))
- cnx.commit()
- print(f"Successfully removed {'video' if is_youtube else 'episode'} from queue.")
- cursor.close()
- return {"data": "Successfully Removed Episode From Queue"}
-
-
-def get_queued_episodes(database_type, cnx, user_id):
- if database_type == "postgresql":
- from psycopg.rows import dict_row
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- get_queued_episodes_query = """
- SELECT * FROM (
- SELECT
- "Episodes".EpisodeTitle as episodetitle,
- "Podcasts".PodcastName as podcastname,
- "Episodes".EpisodePubDate as episodepubdate,
- "Episodes".EpisodeDescription as episodedescription,
- "Episodes".EpisodeArtwork as episodeartwork,
- "Episodes".EpisodeURL as episodeurl,
- "EpisodeQueue".QueuePosition as queueposition,
- "Episodes".EpisodeDuration as episodeduration,
- "EpisodeQueue".QueueDate as queuedate,
- "UserEpisodeHistory".ListenDuration as listenduration,
- "Episodes".EpisodeID as episodeid,
- "Episodes".Completed as completed,
- CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- TRUE as queued,
- CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
- FALSE as is_youtube
- FROM "EpisodeQueue"
- INNER JOIN "Episodes" ON "EpisodeQueue".EpisodeID = "Episodes".EpisodeID
- INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "UserEpisodeHistory" ON
- "EpisodeQueue".EpisodeID = "UserEpisodeHistory".EpisodeID
- AND "EpisodeQueue".UserID = "UserEpisodeHistory".UserID
- LEFT JOIN "SavedEpisodes" ON
- "EpisodeQueue".EpisodeID = "SavedEpisodes".EpisodeID
- AND "SavedEpisodes".UserID = %s
- LEFT JOIN "DownloadedEpisodes" ON
- "EpisodeQueue".EpisodeID = "DownloadedEpisodes".EpisodeID
- AND "DownloadedEpisodes".UserID = %s
- WHERE "EpisodeQueue".UserID = %s
- AND "EpisodeQueue".is_youtube = FALSE
-
- UNION ALL
-
- SELECT
- "YouTubeVideos".VideoTitle as episodetitle,
- "Podcasts".PodcastName as podcastname,
- "YouTubeVideos".PublishedAt as episodepubdate,
- "YouTubeVideos".VideoDescription as episodedescription,
- "YouTubeVideos".ThumbnailURL as episodeartwork,
- "YouTubeVideos".VideoURL as episodeurl,
- "EpisodeQueue".QueuePosition as queueposition,
- "YouTubeVideos".Duration as episodeduration,
- "EpisodeQueue".QueueDate as queuedate,
- "YouTubeVideos".ListenPosition as listenduration,
- "YouTubeVideos".VideoID as episodeid,
- "YouTubeVideos".Completed as completed,
- CASE WHEN "SavedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- TRUE as queued,
- CASE WHEN "DownloadedVideos".VideoID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded,
- TRUE as is_youtube
- FROM "EpisodeQueue"
- INNER JOIN "YouTubeVideos" ON "EpisodeQueue".EpisodeID = "YouTubeVideos".VideoID
- INNER JOIN "Podcasts" ON "YouTubeVideos".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "SavedVideos" ON
- "EpisodeQueue".EpisodeID = "SavedVideos".VideoID
- AND "SavedVideos".UserID = %s
- LEFT JOIN "DownloadedVideos" ON
- "EpisodeQueue".EpisodeID = "DownloadedVideos".VideoID
- AND "DownloadedVideos".UserID = %s
- WHERE "EpisodeQueue".UserID = %s
- AND "EpisodeQueue".is_youtube = TRUE
- ) combined
- ORDER BY queueposition ASC
- """
- else: # MySQL or MariaDB
- cursor = cnx.cursor(dictionary=True)
- get_queued_episodes_query = """
- SELECT * FROM (
- SELECT
- Episodes.EpisodeTitle as episodetitle,
- Podcasts.PodcastName as podcastname,
- Episodes.EpisodePubDate as episodepubdate,
- Episodes.EpisodeDescription as episodedescription,
- Episodes.EpisodeArtwork as episodeartwork,
- Episodes.EpisodeURL as episodeurl,
- EpisodeQueue.QueuePosition as queueposition,
- Episodes.EpisodeDuration as episodeduration,
- EpisodeQueue.QueueDate as queuedate,
- UserEpisodeHistory.ListenDuration as listenduration,
- Episodes.EpisodeID as episodeid,
- Episodes.Completed as completed,
- CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS saved,
- 1 as queued,
- CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
- 0 as is_youtube
- FROM EpisodeQueue
- INNER JOIN Episodes ON EpisodeQueue.EpisodeID = Episodes.EpisodeID
- INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- LEFT JOIN UserEpisodeHistory ON
- EpisodeQueue.EpisodeID = UserEpisodeHistory.EpisodeID
- AND EpisodeQueue.UserID = UserEpisodeHistory.UserID
- LEFT JOIN SavedEpisodes ON
- EpisodeQueue.EpisodeID = SavedEpisodes.EpisodeID
- AND SavedEpisodes.UserID = %s
- LEFT JOIN DownloadedEpisodes ON
- EpisodeQueue.EpisodeID = DownloadedEpisodes.EpisodeID
- AND DownloadedEpisodes.UserID = %s
- WHERE EpisodeQueue.UserID = %s
- AND EpisodeQueue.is_youtube = FALSE
-
- UNION ALL
-
- SELECT
- YouTubeVideos.VideoTitle as episodetitle,
- Podcasts.PodcastName as podcastname,
- YouTubeVideos.PublishedAt as episodepubdate,
- YouTubeVideos.VideoDescription as episodedescription,
- YouTubeVideos.ThumbnailURL as episodeartwork,
- YouTubeVideos.VideoURL as episodeurl,
- EpisodeQueue.QueuePosition as queueposition,
- YouTubeVideos.Duration as episodeduration,
- EpisodeQueue.QueueDate as queuedate,
- YouTubeVideos.ListenPosition as listenduration,
- YouTubeVideos.VideoID as episodeid,
- YouTubeVideos.Completed as completed,
- CASE WHEN SavedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS saved,
- 1 as queued,
- CASE WHEN DownloadedVideos.VideoID IS NOT NULL THEN 1 ELSE 0 END AS downloaded,
- 1 as is_youtube
- FROM EpisodeQueue
- INNER JOIN YouTubeVideos ON EpisodeQueue.EpisodeID = YouTubeVideos.VideoID
- INNER JOIN Podcasts ON YouTubeVideos.PodcastID = Podcasts.PodcastID
- LEFT JOIN SavedVideos ON
- EpisodeQueue.EpisodeID = SavedVideos.VideoID
- AND SavedVideos.UserID = %s
- LEFT JOIN DownloadedVideos ON
- EpisodeQueue.EpisodeID = DownloadedVideos.VideoID
- AND DownloadedVideos.UserID = %s
- WHERE EpisodeQueue.UserID = %s
- AND EpisodeQueue.is_youtube = TRUE
- ) combined
- ORDER BY queueposition ASC
- """
-
- # We now need 6 user_id parameters: 3 for each union part
- cursor.execute(get_queued_episodes_query, (user_id, user_id, user_id, user_id, user_id, user_id))
- queued_episodes = cursor.fetchall()
- cursor.close()
- queued_episodes = lowercase_keys(queued_episodes)
-
- if database_type != "postgresql":
- bool_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube']
- for episode in queued_episodes:
- for field in bool_fields:
- if field in episode:
- episode[field] = bool(episode[field])
-
- return queued_episodes
-
-def check_episode_exists(cnx, database_type, user_id, episode_title, episode_url):
- cursor = cnx.cursor()
- query = """
- SELECT EXISTS(
- SELECT 1 FROM "Episodes"
- JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- WHERE "Podcasts".UserID = %s AND "Episodes".EpisodeTitle = %s AND "Episodes".EpisodeURL = %s
- )
- """ if database_type == "postgresql" else """
- SELECT EXISTS(
- SELECT 1 FROM Episodes
- JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- WHERE Podcasts.UserID = %s AND Episodes.EpisodeTitle = %s AND Episodes.EpisodeURL = %s
- )
- """
- cursor.execute(query, (user_id, episode_title, episode_url))
- result = cursor.fetchone()
- cursor.close()
-
- # Check if result is a dictionary or a tuple
- if isinstance(result, dict):
- return result['exists'] == 1
- elif isinstance(result, tuple):
- return result[0] == 1
- else:
- raise TypeError("Unexpected type for 'result'")
-
-
-def add_shared_episode(database_type, cnx, episode_id, url_key, expiration_date):
- cursor = cnx.cursor()
-
- if database_type == "postgresql":
- query = '''
- INSERT INTO "SharedEpisodes" (EpisodeID, UrlKey, ExpirationDate)
- VALUES (%s, %s, %s)
- '''
- else: # MySQL/MariaDB version
- query = '''
- INSERT INTO SharedEpisodes (EpisodeID, UrlKey, ExpirationDate)
- VALUES (%s, %s, %s)
- '''
-
- try:
- cursor.execute(query, (episode_id, url_key, expiration_date))
- cnx.commit() # Commit the changes
- cursor.close()
- return True
- except Exception as e:
- print(f"Error sharing episode: {e}")
- cursor.close()
- return False
-
-def cleanup_old_episodes(cnx, database_type):
- """
- Master cleanup function that handles both PeopleEpisodes and SharedEpisodes tables
- """
- cleanup_old_people_episodes(cnx, database_type)
- cleanup_expired_shared_episodes(cnx, database_type)
-
-def cleanup_old_people_episodes(cnx, database_type, days=30):
- """
- Remove episodes from PeopleEpisodes that are older than the specified number of days
- """
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- delete_query = """
- DELETE FROM "PeopleEpisodes"
- WHERE AddedDate < CURRENT_TIMESTAMP - INTERVAL '%s days'
- """
- else: # MySQL or MariaDB
- delete_query = """
- DELETE FROM PeopleEpisodes
- WHERE AddedDate < DATE_SUB(NOW(), INTERVAL %s DAY)
- """
-
- cursor.execute(delete_query, (days,))
- deleted_count = cursor.rowcount
- print(f"Cleaned up {deleted_count} episodes older than {days} days from PeopleEpisodes")
- cnx.commit()
-
- except Exception as e:
- print(f"Error during PeopleEpisodes cleanup: {str(e)}")
- cnx.rollback()
- finally:
- cursor.close()
-
-def cleanup_expired_shared_episodes(cnx, database_type):
- """
- Remove expired episodes from SharedEpisodes based on ExpirationDate
- """
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- delete_query = """
- DELETE FROM "SharedEpisodes"
- WHERE ExpirationDate < CURRENT_TIMESTAMP
- """
- else: # MySQL or MariaDB
- delete_query = """
- DELETE FROM SharedEpisodes
- WHERE ExpirationDate < NOW()
- """
-
- cursor.execute(delete_query)
- deleted_count = cursor.rowcount
- print(f"Cleaned up {deleted_count} expired episodes from SharedEpisodes")
- cnx.commit()
-
- except Exception as e:
- print(f"Error during SharedEpisodes cleanup: {str(e)}")
- cnx.rollback()
- finally:
- cursor.close()
-
-
-def build_playlist_query(playlist, database_type):
- # Debug the incoming playlist data
- print(f"DEBUG - Playlist time filter value: {playlist.get('timefilterhours')}")
- print(f"DEBUG - Playlist keys: {list(playlist.keys())}")
-
- # Check and print the progress threshold values
- progress_min = playlist.get('playprogressmin')
- progress_max = playlist.get('playprogressmax')
- print(f"DEBUG - Progress min value: {progress_min}")
- print(f"DEBUG - Progress max value: {progress_max}")
-
- conditions = []
- params = []
-
- # Check if this is a system playlist (owned by user 1)
- is_system_playlist = playlist['userid'] == 1 and playlist['issystemplaylist']
- playlist_name = playlist.get('name', '')
-
- # Special case handling for playlists that need to filter by user listening history
- needs_user_history = playlist_name in ['Currently Listening', 'Almost Done'] or not is_system_playlist
-
- # Ensure Fresh Releases has time filter set
- if playlist_name == 'Fresh Releases' and playlist.get('timefilterhours') is None:
- playlist['timefilterhours'] = 24
- print(f"Setting default 24 hour time filter for Fresh Releases playlist")
-
- if database_type == "postgresql":
- # Special case for playlists that filter by user listening progress
- if playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed']:
- # Base query for partially played episodes - IMPORTANT: Include all ORDER BY columns in SELECT
- query = """
- SELECT DISTINCT e.episodeid, e.episodepubdate
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid
- WHERE h.listenduration > 0
- AND h.listenduration < e.episodeduration
- AND e.Completed = FALSE
- AND e.episodeduration > 0
- """
- params = []
-
- # Add progress min filter if specified - this drives the Almost Done functionality
- if progress_min is not None:
- min_decimal = float(progress_min) / 100.0
- # Use %s parameter placeholder for safety
- query += ' AND (h.listenduration::float / e.episodeduration::float) >= %s'
- params.append(min_decimal)
- print(f"Adding progress min filter: {min_decimal} ({progress_min}% complete)")
-
- # Add progress max filter if specified
- if progress_max is not None:
- max_decimal = float(progress_max) / 100.0
- query += ' AND (h.listenduration::float / e.episodeduration::float) <= %s'
- params.append(max_decimal)
- print(f"Adding progress max filter: {max_decimal}")
-
- print(f"Special query for in-progress playlist with filters")
-
- # Add sort order
- if playlist['sortorder']:
- sort_mapping = {
- 'date_asc': 'e.episodepubdate ASC',
- 'date_desc': 'e.episodepubdate DESC',
- 'duration_asc': 'e.episodeduration ASC',
- 'duration_desc': 'e.episodeduration DESC',
- 'listen_progress': '(h.listenduration::float / e.episodeduration::float) DESC',
- 'completion': '(h.listenduration::float / e.episodeduration::float) DESC'
- }
- order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC')
- query += f" ORDER BY {order_by}"
-
- else:
- # Basic query structure depends on playlist type
- if is_system_playlist:
- if needs_user_history:
- # System playlist that needs user listening history (e.g., Currently Listening)
- query = """
- SELECT e.episodeid
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s
- JOIN "Users" u ON u.UserID = %s
- WHERE 1=1
- """
- params.extend([playlist['userid'], playlist['userid']])
- else:
- # System playlist that doesn't need user history filtering (e.g., Fresh Releases)
- query = """
- SELECT e.episodeid
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid
- JOIN "Users" u ON u.UserID = %s
- WHERE 1=1
- """
- params.extend([playlist['userid']]) # Only needed for timezone
-
- print(f"System playlist detected - showing all podcasts")
- else:
- # User-specific playlist - only show user's podcasts
- query = """
- SELECT e.episodeid
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s
- JOIN "Users" u ON u.UserID = %s
- WHERE p.UserID = %s
- """
- params.extend([playlist['userid'], playlist['userid'], playlist['userid']])
- print(f"User playlist detected - only showing podcasts for user {playlist['userid']}")
-
- # Podcast filter for PostgreSQL
- if playlist['podcastids']:
- conditions.append('e.podcastid = ANY(%s)')
- params.append(playlist['podcastids'])
-
- # Duration filters
- if playlist['minduration'] is not None:
- conditions.append('e.episodeduration >= %s')
- params.append(playlist['minduration'])
- if playlist['maxduration'] is not None:
- conditions.append('e.episodeduration <= %s')
- params.append(playlist['maxduration'])
-
- # Play state filters with progress
- play_state_conditions = []
-
- if playlist['includeunplayed']:
- play_state_conditions.append('h.listenduration IS NULL')
-
- if playlist['includepartiallyplayed']:
- # Base condition: episodes with some progress but not fully listened
- partial_condition = '(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.Completed = FALSE)'
-
- # Add progress range conditions if specified
- if playlist.get('playprogressmin') is not None:
- min_decimal = float(playlist["playprogressmin"]) / 100.0
- partial_condition += f' AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) >= {min_decimal}'
-
- if playlist.get('playprogressmax') is not None:
- max_decimal = float(playlist["playprogressmax"]) / 100.0
- partial_condition += f' AND (h.listenduration::float / NULLIF(e.episodeduration, 0)) <= {max_decimal}'
-
- play_state_conditions.append(partial_condition)
-
- if playlist['includeplayed']:
- play_state_conditions.append('h.listenduration >= e.episodeduration')
-
- if play_state_conditions:
- conditions.append(f"({' OR '.join(play_state_conditions)})")
-
- # Time filter for PostgreSQL with timezone support
- if playlist.get('timefilterhours') is not None:
- print(f"Applying time filter of {playlist['timefilterhours']} hours with timezone support")
- conditions.append('''
- e.episodepubdate AT TIME ZONE 'UTC'
- AT TIME ZONE COALESCE(u.TimeZone, 'UTC') >
- (CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
- AT TIME ZONE COALESCE(u.TimeZone, 'UTC') - INTERVAL '%s hours')
- ''')
- params.append(playlist['timefilterhours'])
-
- # Add all conditions
- if conditions:
- query += " AND " + " AND ".join(conditions)
-
- # Sorting for PostgreSQL
- sort_mapping = {
- 'date_asc': 'e.episodepubdate ASC',
- 'date_desc': 'e.episodepubdate DESC',
- 'duration_asc': 'e.episodeduration ASC',
- 'duration_desc': 'e.episodeduration DESC',
- 'listen_progress': '(COALESCE(h.listenduration, 0)::float / NULLIF(e.episodeduration, 0)) DESC',
- 'completion': 'COALESCE(h.listenduration::float / NULLIF(e.episodeduration, 0), 0) DESC'
- }
-
- order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC')
- if playlist['groupbypodcast']:
- order_by = f'e.podcastid, {order_by}'
-
- query += f" ORDER BY {order_by}"
-
- else: # MySQL version
- # Check for partially played episodes with progress threshold (Almost Done-like functionality)
- if playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed'] and playlist.get('playprogressmin') is not None and float(playlist.get('playprogressmin')) >= 75.0:
- # This is the "Almost Done" pattern - episodes that are 75%+ complete but not finished
- query = """
- SELECT DISTINCT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid
- WHERE h.listenduration > 0
- AND h.listenduration < e.episodeduration
- AND e.Completed = 0
- AND (h.listenduration / NULLIF(e.episodeduration, 0)) >= %s
- """
- min_decimal = float(playlist["playprogressmin"]) / 100.0
- params = [min_decimal]
-
- # Add progress max constraint if specified
- if playlist.get('playprogressmax') is not None:
- max_decimal = float(playlist["playprogressmax"]) / 100.0
- query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= %s'
- params.append(max_decimal)
-
- print(f"Special query for playlist with high progress threshold ({playlist.get('playprogressmin')}%+)")
-
- # Check for partially played episodes without progress threshold (Currently Listening-like functionality)
- elif playlist['includepartiallyplayed'] and not playlist['includeunplayed'] and not playlist['includeplayed'] and (playlist.get('playprogressmin') is None or float(playlist.get('playprogressmin')) < 75.0):
- # This is the "Currently Listening" pattern - any episode that's started but not finished
- query = """
- SELECT DISTINCT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid
- WHERE h.listenduration > 0
- AND h.listenduration < e.episodeduration
- AND e.Completed = 0
- """
- params = []
-
- # Add progress min constraint if specified
- if playlist.get('playprogressmin') is not None:
- min_decimal = float(playlist["playprogressmin"]) / 100.0
- query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) >= %s'
- params.append(min_decimal)
-
- # Add progress max constraint if specified
- if playlist.get('playprogressmax') is not None:
- max_decimal = float(playlist["playprogressmax"]) / 100.0
- query += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= %s'
- params.append(max_decimal)
-
- print(f"Special query for playlist with in-progress episodes")
-
- else:
- # Basic query structure depends on playlist type
- if is_system_playlist:
- if needs_user_history:
- # System playlist that needs user listening history (e.g., Currently Listening)
- query = """
- SELECT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid AND h.userid = %s
- JOIN Users u ON u.UserID = %s
- WHERE 1=1
- """
- params.extend([playlist['userid'], playlist['userid']])
- else:
- # System playlist that doesn't need user history filtering (e.g., Fresh Releases)
- query = """
- SELECT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid
- JOIN Users u ON u.UserID = %s
- WHERE 1=1
- """
- params.extend([playlist['userid']]) # Only needed for timezone
-
- print(f"System playlist detected - showing all podcasts")
- else:
- # User-specific playlist - only show user's podcasts
- query = """
- SELECT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid AND h.userid = %s
- JOIN Users u ON u.UserID = %s
- WHERE p.UserID = %s
- """
- params.extend([playlist['userid'], playlist['userid'], playlist['userid']])
- print(f"User playlist detected - only showing podcasts for user {playlist['userid']}")
-
- # Podcast filter for MySQL
- if playlist['podcastids']:
- # Convert the PostgreSQL array to a list of integers for MySQL
- if isinstance(playlist['podcastids'], list):
- podcast_ids = playlist['podcastids']
- else:
- # If it's a string representation of a list
- import json
- try:
- podcast_ids = json.loads(playlist['podcastids'])
- except:
- # Fallback for PostgreSQL array string format like '{1,2,3}'
- podcast_ids = [int(id.strip()) for id in playlist['podcastids'].strip('{}').split(',') if id.strip()]
-
- if len(podcast_ids) == 1:
- # Simple equality for a single podcast
- conditions.append('e.podcastid = %s')
- params.append(podcast_ids[0])
- else:
- # IN clause for multiple podcasts
- placeholders = ', '.join(['%s'] * len(podcast_ids))
- conditions.append(f'e.podcastid IN ({placeholders})')
- params.extend(podcast_ids)
-
- # Duration filters
- if playlist['minduration'] is not None:
- conditions.append('e.episodeduration >= %s')
- params.append(playlist['minduration'])
- if playlist['maxduration'] is not None:
- conditions.append('e.episodeduration <= %s')
- params.append(playlist['maxduration'])
-
- # Play state filters with progress
- play_state_conditions = []
-
- if playlist['includeunplayed']:
- play_state_conditions.append('h.listenduration IS NULL')
-
- if playlist['includepartiallyplayed']:
- # Base condition: episodes with some progress but not fully listened
- partial_condition = '(h.listenduration > 0 AND h.listenduration < e.episodeduration AND e.Completed = 0)'
-
- # Add progress range conditions if specified
- if playlist.get('playprogressmin') is not None:
- min_decimal = float(playlist["playprogressmin"]) / 100.0
- partial_condition += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) >= {min_decimal}'
-
- if playlist.get('playprogressmax') is not None:
- max_decimal = float(playlist["playprogressmax"]) / 100.0
- partial_condition += f' AND (h.listenduration / NULLIF(e.episodeduration, 0)) <= {max_decimal}'
-
- play_state_conditions.append(partial_condition)
-
- if playlist['includeplayed']:
- play_state_conditions.append('h.listenduration >= e.episodeduration')
-
- if play_state_conditions:
- conditions.append(f"({' OR '.join(play_state_conditions)})")
-
- # Time filter for MySQL with timezone support
- if playlist.get('timefilterhours') is not None:
- print(f"Applying time filter of {playlist['timefilterhours']} hours with timezone support")
- conditions.append('''
- CONVERT_TZ(e.episodepubdate, 'UTC', COALESCE(u.TimeZone, 'UTC')) >
- DATE_SUB(CONVERT_TZ(NOW(), 'UTC', COALESCE(u.TimeZone, 'UTC')), INTERVAL %s HOUR)
- ''')
- params.append(playlist['timefilterhours'])
-
- # Add all conditions
- if conditions:
- query += " AND " + " AND ".join(conditions)
-
- # Sorting for MySQL
- sort_mapping = {
- 'date_asc': 'e.episodepubdate ASC',
- 'date_desc': 'e.episodepubdate DESC',
- 'duration_asc': 'e.episodeduration ASC',
- 'duration_desc': 'e.episodeduration DESC',
- 'listen_progress': '(COALESCE(h.listenduration, 0) / NULLIF(e.episodeduration, 0)) DESC',
- 'completion': 'COALESCE(h.listenduration / NULLIF(e.episodeduration, 0), 0) DESC'
- }
-
- order_by = sort_mapping.get(playlist['sortorder'], 'e.episodepubdate DESC')
- if playlist['groupbypodcast']:
- order_by = f'e.podcastid, {order_by}'
-
- query += f" ORDER BY {order_by}"
-
- # Add limit (same for both databases)
- if playlist['maxepisodes']:
- query += " LIMIT %s"
- params.append(playlist['maxepisodes'])
-
- return query, params
-
-def update_fresh_releases_playlist(cnx, database_type):
- """
- Special function to update the Fresh Releases playlist for all users
- considering their individual timezones.
- """
- cursor = cnx.cursor()
- try:
- # First, identify the Fresh Releases playlist ID
- if database_type == "postgresql":
- cursor.execute("""
- SELECT PlaylistID
- FROM "Playlists"
- WHERE Name = 'Fresh Releases' AND IsSystemPlaylist = TRUE
- """)
- else: # MySQL
- cursor.execute("""
- SELECT PlaylistID
- FROM Playlists
- WHERE Name = 'Fresh Releases' AND IsSystemPlaylist = 1
- """)
-
- playlist_result = cursor.fetchone()
- if not playlist_result:
- raise Exception("Fresh Releases playlist not found in system")
-
- # Handle both tuple and dict results
- if isinstance(playlist_result, tuple):
- playlist_id = playlist_result[0]
- else: # dict
- playlist_id = playlist_result["playlistid"]
-
- print(f"Updating Fresh Releases playlist (ID: {playlist_id})")
-
- # Clear existing contents from the playlist
- if database_type == "postgresql":
- cursor.execute('DELETE FROM "PlaylistContents" WHERE playlistid = %s', (playlist_id,))
- else: # MySQL
- cursor.execute('DELETE FROM PlaylistContents WHERE playlistid = %s', (playlist_id,))
-
- # Get all users and their timezones
- if database_type == "postgresql":
- cursor.execute('SELECT UserID, TimeZone FROM "Users"')
- else: # MySQL
- cursor.execute('SELECT UserID, TimeZone FROM Users')
-
- users = cursor.fetchall()
- added_episodes = set() # Track episodes we've already added to avoid duplicates
- position = 0 # For ordering episodes in the playlist
-
- # Process each user
- for user in users:
- # Handle both tuple and dict results for user data
- if isinstance(user, tuple):
- user_id = user[0]
- timezone = user[1] or 'UTC'
- else: # dict
- user_id = user["userid"]
- timezone = user["timezone"] or 'UTC'
-
- print(f"Processing user {user_id} with timezone {timezone}")
-
- # Get episodes from last 24 hours based on user's timezone
- if database_type == "postgresql":
- query = """
- SELECT e.episodeid
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- WHERE e.episodepubdate AT TIME ZONE 'UTC'
- AT TIME ZONE %s >
- (CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
- AT TIME ZONE %s - INTERVAL '24 hours')
- ORDER BY e.episodepubdate DESC
- """
- cursor.execute(query, (timezone, timezone))
- else: # MySQL
- query = """
- SELECT e.episodeid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- WHERE CONVERT_TZ(e.episodepubdate, 'UTC', %s) >
- DATE_SUB(CONVERT_TZ(NOW(), 'UTC', %s), INTERVAL 24 HOUR)
- ORDER BY e.episodepubdate DESC
- """
- cursor.execute(query, (timezone, timezone))
-
- recent_episodes = cursor.fetchall()
- print(f"Found {len(recent_episodes)} recent episodes for user {user_id}")
-
- # Add episodes to playlist if not already added
- for episode in recent_episodes:
- # Handle both tuple and dict results for episode data
- if isinstance(episode, tuple):
- episode_id = episode[0]
- else: # dict
- episode_id = episode["episodeid"]
-
- if episode_id not in added_episodes:
- if database_type == "postgresql":
- cursor.execute("""
- INSERT INTO "PlaylistContents" (playlistid, episodeid, position)
- VALUES (%s, %s, %s)
- """, (playlist_id, episode_id, position))
- else: # MySQL
- cursor.execute("""
- INSERT INTO PlaylistContents (playlistid, episodeid, position)
- VALUES (%s, %s, %s)
- """, (playlist_id, episode_id, position))
-
- added_episodes.add(episode_id)
- position += 1
-
- # Update LastUpdated timestamp
- if database_type == "postgresql":
- cursor.execute("""
- UPDATE "Playlists"
- SET lastupdated = CURRENT_TIMESTAMP
- WHERE playlistid = %s
- """, (playlist_id,))
- else: # MySQL
- cursor.execute("""
- UPDATE Playlists
- SET lastupdated = CURRENT_TIMESTAMP
- WHERE playlistid = %s
- """, (playlist_id,))
-
- cnx.commit()
- print(f"Successfully updated Fresh Releases playlist with {len(added_episodes)} unique episodes")
-
- except Exception as e:
- print(f"ERROR updating Fresh Releases playlist: {str(e)}")
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
-
-def update_playlist_contents(cnx, database_type, playlist):
- cursor = cnx.cursor()
- try:
- print(f"\n======= UPDATE PLAYLIST: {playlist['name']} (ID: {playlist['playlistid']}) =======")
-
- # Clear existing contents - database specific
- if database_type == "postgresql":
- cursor.execute('DELETE FROM "PlaylistContents" WHERE playlistid = %s',
- (playlist['playlistid'],))
- else: # MySQL
- # For MySQL, add retry logic to handle deadlocks
- max_retries = 3
- retry_count = 0
-
- while retry_count < max_retries:
- try:
- # Start a fresh transaction for each attempt
- cnx.rollback() # Clear any previous transaction state
-
- cursor.execute('DELETE FROM PlaylistContents WHERE playlistid = %s',
- (playlist['playlistid'],))
- break # Exit the retry loop if successful
- except mysql.connector.errors.InternalError as e:
- if "Deadlock" in str(e) and retry_count < max_retries - 1:
- # If it's a deadlock and we have retries left
- retry_count += 1
- print(f"Deadlock detected, retrying operation (attempt {retry_count}/{max_retries})")
- # Add a small delay before retrying to reduce contention
- import time
- time.sleep(0.5 * retry_count) # Increasing backoff
- else:
- # Either not a deadlock or we've exhausted retries
- raise
-
- print(f"Cleared existing contents for playlist {playlist['playlistid']}")
-
- # Build and execute query
- query, params = build_playlist_query(playlist, database_type)
-
- # Try to create a debug query with params substituted
- debug_query = query
- debug_params = list(params) # Make a copy
-
- try:
- for i, param in enumerate(debug_params):
- placeholder = "%s"
- if param is None:
- replacement = "NULL"
- elif isinstance(param, list):
- if database_type == "postgresql":
- replacement = f"ARRAY[{','.join(map(str, param))}]"
- else: # MySQL
- replacement = f"({','.join(map(str, param))})"
- elif isinstance(param, str):
- replacement = f"'{param}'"
- else:
- replacement = str(param)
-
- debug_query = debug_query.replace(placeholder, replacement, 1)
-
- print(f"DEBUG QUERY: {debug_query}")
- except Exception as e:
- print(f"Error creating debug query: {e}")
-
- # First, let's check if there are any episodes at all for this user
- if database_type == "postgresql":
- basic_check_query = f"""
- SELECT COUNT(*) FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- WHERE p.UserID = {playlist['userid']}
- """
- else: # MySQL
- basic_check_query = f"""
- SELECT COUNT(*) FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- WHERE p.UserID = {playlist['userid']}
- """
- cursor.execute(basic_check_query)
- # Handle both dictionary and tuple result formats
- result = cursor.fetchone()
- if isinstance(result, dict):
- # Dictionary format - use first key in the dict
- total_episodes = result[list(result.keys())[0]]
- else:
- # Tuple format - use first element
- total_episodes = result[0]
-
- print(f"Total episodes available for user {playlist['userid']}: {total_episodes}")
-
- # Now execute the actual filtered query
- cursor.execute(query, params)
- episodes = cursor.fetchall()
- episode_count = len(episodes)
- print(f"Found {episode_count} episodes matching criteria for playlist {playlist['playlistid']}")
-
- # If we found episodes, show some details
- if episode_count > 0:
- # Handle both tuple and dict format episodes
- episode_ids = []
- for ep in episodes[:5]:
- if isinstance(ep, dict):
- episode_ids.append(ep.get('episodeid'))
- else:
- episode_ids.append(ep[0])
-
- print(f"First few episode IDs: {episode_ids}")
-
- # Get details for the first episode
- if episode_count > 0:
- if isinstance(episodes[0], dict):
- first_ep_id = episodes[0].get('episodeid')
- else:
- first_ep_id = episodes[0][0]
-
- if database_type == "postgresql":
- cursor.execute("""
- SELECT e.episodeid, e.episodetitle, e.episodeduration,
- h.listenduration, p.podcastid, p.podcastname, p.userid
- FROM "Episodes" e
- JOIN "Podcasts" p ON e.podcastid = p.podcastid
- LEFT JOIN "UserEpisodeHistory" h ON e.episodeid = h.episodeid AND h.userid = %s
- WHERE e.episodeid = %s
- """, (playlist['userid'], first_ep_id))
- else: # MySQL
- cursor.execute("""
- SELECT e.episodeid, e.episodetitle, e.episodeduration,
- h.listenduration, p.podcastid, p.podcastname, p.userid
- FROM Episodes e
- JOIN Podcasts p ON e.podcastid = p.podcastid
- LEFT JOIN UserEpisodeHistory h ON e.episodeid = h.episodeid AND h.userid = %s
- WHERE e.episodeid = %s
- """, (playlist['userid'], first_ep_id))
-
- ep_details = cursor.fetchone()
- print(f"First episode details: {ep_details}")
-
- # Insert episodes into playlist
- for position, episode in enumerate(episodes):
- if isinstance(episode, dict):
- episode_id = episode.get('episodeid')
- else:
- episode_id = episode[0]
-
- if database_type == "postgresql":
- cursor.execute("""
- INSERT INTO "PlaylistContents" (playlistid, episodeid, position)
- VALUES (%s, %s, %s)
- """, (playlist['playlistid'], episode_id, position))
- else: # MySQL
- cursor.execute("""
- INSERT INTO PlaylistContents (playlistid, episodeid, position)
- VALUES (%s, %s, %s)
- """, (playlist['playlistid'], episode_id, position))
-
- # Update LastUpdated timestamp
- if database_type == "postgresql":
- cursor.execute("""
- UPDATE "Playlists"
- SET lastupdated = CURRENT_TIMESTAMP
- WHERE playlistid = %s
- """, (playlist['playlistid'],))
- else: # MySQL
- cursor.execute("""
- UPDATE Playlists
- SET lastupdated = CURRENT_TIMESTAMP
- WHERE playlistid = %s
- """, (playlist['playlistid'],))
-
- cnx.commit()
- print(f"Successfully updated playlist {playlist['playlistid']} with {episode_count} episodes")
-
- except Exception as e:
- print(f"ERROR updating playlist {playlist['name']}: {str(e)}")
- import traceback
- print(traceback.format_exc())
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
-
-def update_all_playlists(cnx, database_type):
- """
- Update all playlists based on their rules
- """
- cursor = cnx.cursor()
- try:
- print("\n=================== PLAYLIST UPDATE STARTING ===================")
- print("Starting to fetch all playlists")
-
- if database_type == "postgresql":
- cursor.execute('''
- SELECT
- playlistid, userid, name, description, issystemplaylist,
- podcastids, includeunplayed, includepartiallyplayed,
- includeplayed, minduration, maxduration, sortorder,
- groupbypodcast, maxepisodes, playprogressmin,
- playprogressmax, timefilterhours
- FROM "Playlists"
- ''')
- else: # MySQL
- cursor.execute('''
- SELECT
- PlaylistID, UserID, Name, Description, IsSystemPlaylist,
- PodcastIDs, IncludeUnplayed, IncludePartiallyPlayed,
- IncludePlayed, MinDuration, MaxDuration, SortOrder,
- GroupByPodcast, MaxEpisodes, PlayProgressMin,
- PlayProgressMax, TimeFilterHours
- FROM Playlists
- ''')
-
- columns = [desc[0].lower() for desc in cursor.description]
- print(f"Playlist columns: {columns}")
- playlists = cursor.fetchall()
- total_playlists = len(playlists)
- print(f"Found {total_playlists} playlists to update")
-
- # Let's print info about users and their podcasts
- if database_type == "postgresql":
- cursor.execute("""
- SELECT userid, COUNT(DISTINCT podcastid) as podcast_count
- FROM "Podcasts"
- GROUP BY userid
- """)
- else: # MySQL
- cursor.execute("""
- SELECT UserID, COUNT(DISTINCT PodcastID) as podcast_count
- FROM Podcasts
- GROUP BY UserID
- """)
-
- user_podcast_counts = cursor.fetchall()
- print(f"User podcast counts: {user_podcast_counts}")
-
- if database_type == "postgresql":
- cursor.execute("""
- SELECT p.userid, p.podcastid, COUNT(e.episodeid) as episode_count
- FROM "Podcasts" p
- JOIN "Episodes" e ON p.podcastid = e.podcastid
- GROUP BY p.userid, p.podcastid
- ORDER BY p.userid, p.podcastid
- """)
- else: # MySQL
- cursor.execute("""
- SELECT p.UserID, p.PodcastID, COUNT(e.EpisodeID) as episode_count
- FROM Podcasts p
- JOIN Episodes e ON p.PodcastID = e.PodcastID
- GROUP BY p.UserID, p.PodcastID
- ORDER BY p.UserID, p.PodcastID
- """)
-
- podcast_episode_counts = cursor.fetchall()
- print(f"First few podcast episode counts: {podcast_episode_counts[:5]}")
-
- # Handle Fresh Releases separately
- update_fresh_releases_playlist(cnx, database_type)
-
- for idx, playlist in enumerate(playlists, 1):
- if isinstance(playlist, tuple):
- playlist_dict = dict(zip(columns, playlist))
- print(f"DEBUG - Playlist dict keys: {list(playlist_dict.keys())}")
- print(f"DEBUG - Time filter value: {playlist_dict.get('timefilterhours')}")
- else:
- # If it's already a dict, we need to ensure keys are lowercase
- playlist_dict = {k.lower(): v for k, v in playlist.items()}
- print(f"DEBUG - Playlist dict keys: {list(playlist_dict.keys())}")
- print(f"DEBUG - Time filter value: {playlist_dict.get('timefilterhours')}")
-
- # Ensure timefilterhours is properly set
- if 'timefilterhours' not in playlist_dict and 'TimeFilterHours' in playlist_dict:
- playlist_dict['timefilterhours'] = playlist_dict['TimeFilterHours']
-
- # Skip Fresh Releases as it's handled separately
- if playlist_dict.get('name') == 'Fresh Releases' and playlist_dict.get('issystemplaylist', playlist_dict.get('issystemplaylist', False)):
- print(f"Skipping Fresh Releases playlist (ID: {playlist_dict.get('playlistid')}) as it's handled separately")
- continue
-
- print(f"\nProcessing playlist {idx}/{total_playlists}: {playlist_dict.get('name')} (ID: {playlist_dict.get('playlistid')})")
- print(f"UserID: {playlist_dict.get('userid')}")
-
- try:
- update_playlist_contents(cnx, database_type, playlist_dict)
- print(f"Successfully completed playlist {idx}/{total_playlists}")
- except Exception as e:
- print(f"Error updating playlist {idx}/{total_playlists} ID {playlist_dict.get('playlistid')}: {str(e)}")
- continue
-
- print(f"Finished processing all {total_playlists} playlists")
- print("=============== PLAYLIST UPDATE COMPLETE ===============\n")
- cnx.commit()
-
- except Exception as e:
- print(f"Error in update_all_playlists: {str(e)}")
- if hasattr(e, '__traceback__'):
- import traceback
- print(traceback.format_exc())
- cnx.rollback()
- finally:
- cursor.close()
-
-def create_playlist(cnx, database_type, playlist_data):
- """
- Create a new playlist and return its ID
- """
- cursor = cnx.cursor()
- try:
- logging.info(f"Attempting to create playlist with data: {playlist_data}")
- min_duration = playlist_data.min_duration * 60 if playlist_data.min_duration is not None else None
- max_duration = playlist_data.max_duration * 60 if playlist_data.max_duration is not None else None
-
- # Convert podcast_ids list to appropriate format based on database type
- if database_type == "postgresql":
- podcast_ids = playlist_data.podcast_ids # PostgreSQL can handle list directly
- else: # MySQL - convert to JSON string
- import json
- # Always ensure podcast_ids is a list before processing
- if playlist_data.podcast_ids is None:
- podcast_ids = json.dumps([])
- elif isinstance(playlist_data.podcast_ids, (list, tuple)):
- podcast_ids = json.dumps(list(playlist_data.podcast_ids))
- else:
- # Handle single value case
- podcast_ids = json.dumps([playlist_data.podcast_ids])
-
- # Create tuple of values for insert and log them
- insert_values = (
- playlist_data.user_id,
- playlist_data.name,
- playlist_data.description,
- podcast_ids,
- playlist_data.include_unplayed,
- playlist_data.include_partially_played,
- playlist_data.include_played,
- min_duration,
- max_duration,
- playlist_data.sort_order,
- playlist_data.group_by_podcast,
- playlist_data.max_episodes,
- playlist_data.icon_name,
- playlist_data.play_progress_min,
- playlist_data.play_progress_max,
- playlist_data.time_filter_hours
- )
- logging.info(f"Insert values: {insert_values}")
-
- try:
- if database_type == "postgresql":
- cursor.execute("""
- INSERT INTO "Playlists" (
- UserID,
- Name,
- Description,
- IsSystemPlaylist,
- PodcastIDs,
- IncludeUnplayed,
- IncludePartiallyPlayed,
- IncludePlayed,
- MinDuration,
- MaxDuration,
- SortOrder,
- GroupByPodcast,
- MaxEpisodes,
- IconName,
- PlayProgressMin,
- PlayProgressMax,
- TimeFilterHours
- ) VALUES (
- %s, %s, %s, FALSE, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
- ) RETURNING PlaylistID;
- """, insert_values)
-
- try:
- result = cursor.fetchone()
- logging.info(f"Insert result: {result}")
- if result is None:
- raise Exception("No playlist ID returned from insert")
- # Handle both dict and tuple results
- if isinstance(result, dict):
- playlist_id = result['playlistid']
- else:
- playlist_id = result[0]
- cnx.commit()
-
- # Get the newly created playlist details to update it
- # Make sure podcast_ids is always a list for update_playlist_contents
- update_podcast_ids = playlist_data.podcast_ids
- if update_podcast_ids is None:
- update_podcast_ids = []
- elif not isinstance(update_podcast_ids, (list, tuple)):
- update_podcast_ids = [update_podcast_ids]
-
- playlist_dict = {
- 'playlistid': playlist_id,
- 'userid': playlist_data.user_id,
- 'name': playlist_data.name,
- 'description': playlist_data.description,
- 'issystemplaylist': False,
- 'podcastids': update_podcast_ids,
- 'includeunplayed': playlist_data.include_unplayed,
- 'includepartiallyplayed': playlist_data.include_partially_played,
- 'includeplayed': playlist_data.include_played,
- 'minduration': min_duration,
- 'maxduration': max_duration,
- 'sortorder': playlist_data.sort_order,
- 'groupbypodcast': playlist_data.group_by_podcast,
- 'maxepisodes': playlist_data.max_episodes,
- 'playprogressmin': playlist_data.play_progress_min,
- 'playprogressmax': playlist_data.play_progress_max,
- 'timefilterhours': playlist_data.time_filter_hours
- }
-
- # Update the playlist contents immediately
- update_playlist_contents(cnx, database_type, playlist_dict)
-
- return playlist_id
- except Exception as fetch_e:
- logging.error(f"Error fetching result: {fetch_e}")
- raise
-
- else: # MySQL
- cursor.execute("""
- INSERT INTO Playlists (
- UserID,
- Name,
- Description,
- IsSystemPlaylist,
- PodcastIDs,
- IncludeUnplayed,
- IncludePartiallyPlayed,
- IncludePlayed,
- MinDuration,
- MaxDuration,
- SortOrder,
- GroupByPodcast,
- MaxEpisodes,
- IconName,
- PlayProgressMin,
- PlayProgressMax,
- TimeFilterHours
- ) VALUES (
- %s, %s, %s, FALSE, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s
- );
- """, insert_values)
-
- # For MySQL, we need to get the last inserted ID
- playlist_id = cursor.lastrowid
- if playlist_id is None:
- raise Exception("No playlist ID returned from insert")
- cnx.commit()
-
- # Get the newly created playlist details to update it
- # Make sure podcast_ids is always a list for update_playlist_contents
- update_podcast_ids = playlist_data.podcast_ids
- if update_podcast_ids is None:
- update_podcast_ids = []
- elif not isinstance(update_podcast_ids, (list, tuple)):
- update_podcast_ids = [update_podcast_ids]
-
- playlist_dict = {
- 'playlistid': playlist_id,
- 'userid': playlist_data.user_id,
- 'name': playlist_data.name,
- 'description': playlist_data.description,
- 'issystemplaylist': False,
- 'podcastids': update_podcast_ids,
- 'includeunplayed': playlist_data.include_unplayed,
- 'includepartiallyplayed': playlist_data.include_partially_played,
- 'includeplayed': playlist_data.include_played,
- 'minduration': min_duration,
- 'maxduration': max_duration,
- 'sortorder': playlist_data.sort_order,
- 'groupbypodcast': playlist_data.group_by_podcast,
- 'maxepisodes': playlist_data.max_episodes,
- 'playprogressmin': playlist_data.play_progress_min,
- 'playprogressmax': playlist_data.play_progress_max,
- 'timefilterhours': playlist_data.time_filter_hours
- }
-
- # Update the playlist contents immediately
- update_playlist_contents(cnx, database_type, playlist_dict)
-
- return playlist_id
-
- except Exception as sql_e:
- logging.error(f"SQL execution error: {sql_e}")
- if hasattr(sql_e, 'pgerror'):
- logging.error(f"PG Error: {sql_e.pgerror}")
- if hasattr(sql_e, 'diag'):
- logging.error(f"Diagnostics: {sql_e.diag.message_detail}")
- raise
-
- except Exception as e:
- cnx.rollback()
- logging.error(f"Detailed error creating playlist: {str(e)}")
- logging.error(f"Error type: {type(e)}")
- logging.error(f"Error args: {getattr(e, 'args', None)}")
- raise Exception(f"Failed to create playlist: {str(e)}\nPlaylist data: {playlist_data}")
- finally:
- cursor.close()
-
-def delete_playlist(cnx, database_type, user_id, playlist_id):
- """
- Delete a playlist if it belongs to the user and is not a system playlist
- """
- cursor = cnx.cursor()
- try:
- # Check if playlist exists and belongs to user
- if database_type == "postgresql":
- cursor.execute("""
- SELECT IsSystemPlaylist, UserID
- FROM "Playlists"
- WHERE PlaylistID = %s
- """, (playlist_id,))
- else: # MySQL
- cursor.execute("""
- SELECT IsSystemPlaylist, UserID
- FROM Playlists
- WHERE PlaylistID = %s
- """, (playlist_id,))
-
- result = cursor.fetchone()
- if not result:
- raise Exception("Playlist not found")
-
- # Handle different result formats (tuple vs dict)
- if isinstance(result, tuple):
- is_system = result[0]
- playlist_user_id = result[1]
- else:
- # For dict results, check for both capitalized and lowercase keys
- if 'issystemplaylist' in result:
- is_system = result['issystemplaylist']
- else:
- is_system = result['IsSystemPlaylist']
-
- if 'userid' in result:
- playlist_user_id = result['userid']
- else:
- playlist_user_id = result['UserID']
-
- if is_system:
- raise Exception("Cannot delete system playlists")
- if playlist_user_id != user_id:
- raise Exception("Unauthorized to delete this playlist")
-
- # Delete the playlist
- if database_type == "postgresql":
- cursor.execute("""
- DELETE FROM "Playlists"
- WHERE PlaylistID = %s
- """, (playlist_id,))
- else: # MySQL
- cursor.execute("""
- DELETE FROM Playlists
- WHERE PlaylistID = %s
- """, (playlist_id,))
-
- cnx.commit()
-
- except Exception as e:
- cnx.rollback()
- raise Exception(f"Failed to delete playlist: {str(e)}")
- finally:
- cursor.close()
-
-def normalize_playlist_data(playlist_record):
- """Normalize playlist data regardless of whether it's a tuple or dict."""
- if isinstance(playlist_record, tuple):
- result = {
- 'playlist_id': playlist_record[0],
- 'user_id': playlist_record[1],
- 'name': playlist_record[2],
- 'description': playlist_record[3],
- 'is_system_playlist': playlist_record[4],
- 'podcast_ids': playlist_record[5],
- 'include_unplayed': playlist_record[6],
- 'include_partially_played': playlist_record[7],
- 'include_played': playlist_record[8],
- 'min_duration': playlist_record[9],
- 'max_duration': playlist_record[10],
- 'sort_order': playlist_record[11],
- 'group_by_podcast': playlist_record[12],
- 'max_episodes': playlist_record[13],
- 'last_updated': playlist_record[14],
- 'created': playlist_record[15],
- 'icon_name': playlist_record[16],
- 'episode_count': playlist_record[17]
- }
- else:
- result = {
- 'playlist_id': playlist_record['playlistid'],
- 'user_id': playlist_record['userid'],
- 'name': playlist_record['name'],
- 'description': playlist_record['description'],
- 'is_system_playlist': playlist_record['issystemplaylist'],
- 'podcast_ids': playlist_record['podcastids'],
- 'include_unplayed': playlist_record['includeunplayed'],
- 'include_partially_played': playlist_record['includepartiallyplayed'],
- 'include_played': playlist_record['includeplayed'],
- 'min_duration': playlist_record['minduration'],
- 'max_duration': playlist_record['maxduration'],
- 'sort_order': playlist_record['sortorder'],
- 'group_by_podcast': playlist_record['groupbypodcast'],
- 'max_episodes': playlist_record['maxepisodes'],
- 'last_updated': playlist_record['lastupdated'],
- 'created': playlist_record['created'],
- 'icon_name': playlist_record['iconname'],
- 'episode_count': playlist_record['episode_count']
- }
-
- # Convert null values to appropriate string representations or default values
- if result['last_updated'] is None:
- result['last_updated'] = ""
-
- if result['created'] is None:
- result['created'] = ""
-
- if result['icon_name'] is None:
- result['icon_name'] = "" # Or a default icon name like "ph-playlist"
-
- # Handle episode_count - ensure it's an integer
- if isinstance(result['episode_count'], str): # It's coming back as a timestamp string
- result['episode_count'] = 0
-
- return result
-
-def normalize_preview_episode(episode_record):
- """Normalize episode preview data regardless of whether it's a tuple or dict."""
- if isinstance(episode_record, tuple):
- return {
- 'title': episode_record[0],
- 'artwork': episode_record[1]
- }
- return {
- 'title': episode_record.get('episodetitle', episode_record.get('EpisodeTitle')),
- 'artwork': episode_record.get('episodeartwork', episode_record.get('EpisodeArtwork'))
- }
-
-def get_playlists(cnx, database_type, user_id):
- """
- Get all playlists (system playlists and user's custom playlists)
- Returns consistently formatted dict results regardless of database response format
- """
- try:
- if database_type == "postgresql":
- # Create a cursor that returns dictionaries for PostgreSQL
- cursor = cnx.cursor(row_factory=psycopg.rows.dict_row)
-
- # PostgreSQL query
- cursor.execute("""
- WITH filtered_episodes AS (
- SELECT pc.PlaylistID, pc.EpisodeID
- FROM "PlaylistContents" pc
- JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID
- JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
- WHERE p.UserID = %s
- )
- SELECT
- p.*,
- COUNT(fe.EpisodeID)::INTEGER as episode_count,
- p.IconName as icon_name
- FROM "Playlists" p
- LEFT JOIN filtered_episodes fe ON p.PlaylistID = fe.PlaylistID
- WHERE p.IsSystemPlaylist = TRUE
- OR p.UserID = %s
- GROUP BY p.PlaylistID
- ORDER BY p.IsSystemPlaylist DESC, p.Name ASC
- """, (user_id, user_id))
-
- playlists = cursor.fetchall()
-
- else: # MySQL
- # Create a cursor for MySQL
- cursor = cnx.cursor(dictionary=True)
-
- # MySQL query
- cursor.execute("""
- WITH filtered_episodes AS (
- SELECT pc.PlaylistID, pc.EpisodeID
- FROM PlaylistContents pc
- JOIN Episodes e ON pc.EpisodeID = e.EpisodeID
- JOIN Podcasts p ON e.PodcastID = p.PodcastID
- WHERE p.UserID = %s
- )
- SELECT
- p.*,
- COUNT(fe.EpisodeID) as episode_count,
- p.IconName as icon_name
- FROM Playlists p
- LEFT JOIN filtered_episodes fe ON p.PlaylistID = fe.PlaylistID
- WHERE p.IsSystemPlaylist = TRUE
- OR p.UserID = %s
- GROUP BY p.PlaylistID
- ORDER BY p.IsSystemPlaylist DESC, p.Name ASC
- """, (user_id, user_id))
-
- playlists = cursor.fetchall()
-
- playlist_list = []
- for playlist_record in playlists:
- # Get the podcast_ids field
- raw_podcast_ids = playlist_record.get('podcastids', playlist_record.get('PodcastIDs'))
-
- # Process podcast_ids based on the data type and database
- processed_podcast_ids = None
- if raw_podcast_ids is not None:
- if database_type == "postgresql":
- # PostgreSQL returns a list directly
- processed_podcast_ids = raw_podcast_ids
- else:
- # MySQL: Handle different formats
- import json
-
- # If it's a single integer, wrap it in a list
- if isinstance(raw_podcast_ids, int):
- processed_podcast_ids = [raw_podcast_ids]
- # If it's a single string that can be parsed as an integer
- elif isinstance(raw_podcast_ids, str) and raw_podcast_ids.strip().isdigit():
- processed_podcast_ids = [int(raw_podcast_ids.strip())]
- # If it's a string, try to parse it
- elif isinstance(raw_podcast_ids, str):
- try:
- # Try to parse as JSON string
- processed_podcast_ids = json.loads(raw_podcast_ids)
- except json.JSONDecodeError:
- # If that fails, try to handle quoted strings
- try:
- # Strip quotes if present
- cleaned = raw_podcast_ids.strip('"\'')
- # Manual parsing for array-like strings
- if cleaned.startswith('[') and cleaned.endswith(']'):
- items = cleaned[1:-1].split(',')
- processed_podcast_ids = [int(item.strip()) for item in items if item.strip()]
- else:
- # For comma-separated list without brackets
- processed_podcast_ids = [int(item.strip()) for item in cleaned.split(',') if item.strip()]
- except (ValueError, AttributeError):
- # Last resort: empty list
- processed_podcast_ids = []
- else:
- # If it's none of the above, keep as is
- processed_podcast_ids = raw_podcast_ids
-
- # Make sure we always return a list
- if processed_podcast_ids is not None and not isinstance(processed_podcast_ids, list):
- processed_podcast_ids = [processed_podcast_ids]
-
- # Normalize field names to handle both PostgreSQL's lowercase and MySQL's capitalized names
- playlist_dict = {
- 'playlist_id': playlist_record.get('playlistid', playlist_record.get('PlaylistID')),
- 'user_id': playlist_record.get('userid', playlist_record.get('UserID')),
- 'name': playlist_record.get('name', playlist_record.get('Name')),
- 'description': playlist_record.get('description', playlist_record.get('Description')),
- 'is_system_playlist': bool(playlist_record.get('issystemplaylist', playlist_record.get('IsSystemPlaylist'))),
- 'podcast_ids': processed_podcast_ids, # Use our processed value
- 'include_unplayed': bool(playlist_record.get('includeunplayed', playlist_record.get('IncludeUnplayed'))),
- 'include_partially_played': bool(playlist_record.get('includepartiallyplayed', playlist_record.get('IncludePartiallyPlayed'))),
- 'include_played': bool(playlist_record.get('includeplayed', playlist_record.get('IncludePlayed'))),
- 'min_duration': playlist_record.get('minduration', playlist_record.get('MinDuration')),
- 'max_duration': playlist_record.get('maxduration', playlist_record.get('MaxDuration')),
- 'sort_order': playlist_record.get('sortorder', playlist_record.get('SortOrder')),
- 'group_by_podcast': bool(playlist_record.get('groupbypodcast', playlist_record.get('GroupByPodcast'))),
- 'max_episodes': playlist_record.get('maxepisodes', playlist_record.get('MaxEpisodes')),
- 'last_updated': playlist_record.get('lastupdated', playlist_record.get('LastUpdated', "")),
- 'created': playlist_record.get('created', playlist_record.get('Created', "")),
- 'icon_name': playlist_record.get('iconname', playlist_record.get('IconName', "")),
- 'episode_count': int(playlist_record.get('episode_count', 0) or 0)
- }
-
- # Get preview episodes with error handling
- try:
- if database_type == "postgresql":
- # Use dict cursor for PostgreSQL
- preview_cursor = cnx.cursor(row_factory=psycopg.rows.dict_row)
- preview_cursor.execute("""
- SELECT e.EpisodeTitle as episodetitle, e.EpisodeArtwork as episodeartwork
- FROM "PlaylistContents" pc
- JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID
- JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
- WHERE pc.PlaylistID = %s
- AND p.UserID = %s
- ORDER BY pc.Position
- LIMIT 3
- """, (playlist_dict['playlist_id'], user_id))
- else: # MySQL
- # Use dict cursor for MySQL
- preview_cursor = cnx.cursor(dictionary=True)
- preview_cursor.execute("""
- SELECT e.EpisodeTitle as episodetitle, e.EpisodeArtwork as episodeartwork
- FROM PlaylistContents pc
- JOIN Episodes e ON pc.EpisodeID = e.EpisodeID
- JOIN Podcasts p ON e.PodcastID = p.PodcastID
- WHERE pc.PlaylistID = %s
- AND p.UserID = %s
- ORDER BY pc.Position
- LIMIT 3
- """, (playlist_dict['playlist_id'], user_id))
-
- preview_episodes = preview_cursor.fetchall()
-
- # Normalize field names for preview episodes
- playlist_dict['preview_episodes'] = []
- for ep in preview_episodes:
- # Handle both PostgreSQL and MySQL column naming
- title = ep.get('episodetitle', ep.get('EpisodeTitle', ''))
- artwork = ep.get('episodeartwork', ep.get('EpisodeArtwork', ''))
- playlist_dict['preview_episodes'].append({
- 'title': title,
- 'artwork': artwork
- })
-
- preview_cursor.close()
- except Exception as e:
- print(f"Error fetching preview episodes for playlist {playlist_dict['playlist_id']}: {e}")
- playlist_dict['preview_episodes'] = []
-
- playlist_list.append(playlist_dict)
-
- return playlist_list
- except Exception as e:
- raise Exception(f"Failed to get playlists: {str(e)}")
- finally:
- if 'cursor' in locals():
- cursor.close()
-
-def normalize_episode(episode):
- """Normalize episode data regardless of tuple or dict format"""
- if isinstance(episode, tuple):
- return {
- 'episodeid': episode[0],
- 'episodetitle': episode[1],
- 'episodedescription': episode[2],
- 'episodeartwork': episode[3],
- 'episodepubdate': episode[4],
- 'episodeurl': episode[5],
- 'episodeduration': episode[6],
- 'listenduration': episode[7],
- 'completed': bool(episode[8]) if episode[8] is not None else False,
- 'saved': bool(episode[9]) if episode[9] is not None else False,
- 'queued': bool(episode[10]) if episode[10] is not None else False,
- 'is_youtube': bool(episode[11]) if episode[11] is not None else False,
- 'downloaded': bool(episode[12]) if episode[12] is not None else False,
- 'podcastname': episode[13]
- }
-
- # For dict case, map field names explicitly
- field_mappings = {
- 'episodeid': ['episodeid', 'EpisodeID'],
- 'episodetitle': ['episodetitle', 'EpisodeTitle'],
- 'episodedescription': ['episodedescription', 'EpisodeDescription'],
- 'episodeartwork': ['episodeartwork', 'EpisodeArtwork'],
- 'episodepubdate': ['episodepubdate', 'EpisodePubDate'],
- 'episodeurl': ['episodeurl', 'EpisodeURL'],
- 'episodeduration': ['episodeduration', 'EpisodeDuration'],
- 'listenduration': ['listenduration', 'ListenDuration'],
- 'completed': bool(episode['completed']) if episode['completed'] is not None else False,
- 'saved': bool(episode['saved']) if episode['saved'] is not None else False,
- 'queued': bool(episode['queued']) if episode['queued'] is not None else False,
- 'is_youtube': bool(episode.get('isyoutube', False)), # Use get() with default False
- 'downloaded': bool(episode['downloaded']) if episode['downloaded'] is not None else False,
- 'podcastname': ['podcastname', 'PodcastName']
- }
-
- result = {}
- for field, possible_keys in field_mappings.items():
- # Try all possible keys for each field
- value = None
- for key in possible_keys:
- value = episode.get(key)
- if value is not None:
- break
-
- # Handle booleans
- if field in ['completed', 'saved', 'queued', 'is_youtube', 'downloaded']:
- value = value or False
-
- result[field] = value
-
- return result
-
-def normalize_playlist_info(playlist_info):
- """Normalize playlist info data regardless of tuple or dict format"""
- if isinstance(playlist_info, tuple):
- return {
- 'name': playlist_info[0],
- 'description': playlist_info[1],
- 'episode_count': playlist_info[2],
- 'icon_name': playlist_info[3]
- }
- # For dict case, first try lowercase keys (most common)
- name = playlist_info.get('name')
- description = playlist_info.get('description')
- episode_count = playlist_info.get('episode_count')
- icon_name = playlist_info.get('iconname') # Note: this comes back as 'iconname' not 'icon_name'
-
- # If any are None, try uppercase keys as fallback
- if name is None:
- name = playlist_info.get('Name')
- if description is None:
- description = playlist_info.get('Description')
- if episode_count is None:
- episode_count = playlist_info.get('EpisodeCount')
- if icon_name is None:
- icon_name = playlist_info.get('IconName')
-
- return {
- 'name': name,
- 'description': description,
- 'episode_count': episode_count,
- 'icon_name': icon_name
- }
-
-def get_playlist_episodes(cnx, database_type, user_id, playlist_id):
- """
- Get all episodes in a playlist, applying the playlist's filters
- Returns both playlist info and episodes in format matching Rust structs
- """
- print(f"Starting playlist episodes fetch for playlist_id={playlist_id}")
- cursor = cnx.cursor()
- try:
- # Get playlist info
- # Get playlist info with user-specific episode count
- if database_type == "postgresql":
- cursor.execute("""
- SELECT
- p.Name,
- p.Description,
- (SELECT COUNT(*)
- FROM "PlaylistContents" pc
- JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID
- JOIN "Podcasts" pod ON e.PodcastID = pod.PodcastID
- LEFT JOIN "UserEpisodeHistory" h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s
- WHERE pc.PlaylistID = p.PlaylistID
- AND (p.IsSystemPlaylist = FALSE OR
- (p.IsSystemPlaylist = TRUE AND
- (h.EpisodeID IS NOT NULL OR pod.UserID = %s)))
- ) as episode_count,
- p.IconName,
- p.IsSystemPlaylist
- FROM "Playlists" p
- WHERE p.PlaylistID = %s AND (p.UserID = %s OR p.IsSystemPlaylist = TRUE)
- GROUP BY p.PlaylistID, p.Name, p.Description, p.IconName, p.IsSystemPlaylist
- """, (user_id, user_id, playlist_id, user_id))
- # Get playlist info with user-specific episode count
- else: # MySQL
- cursor.execute("""
- SELECT
- p.Name,
- p.Description,
- (SELECT COUNT(*)
- FROM PlaylistContents pc
- JOIN Episodes e ON pc.EpisodeID = e.EpisodeID
- JOIN Podcasts pod ON e.PodcastID = pod.PodcastID
- LEFT JOIN UserEpisodeHistory h ON e.EpisodeID = h.EpisodeID AND h.UserID = %s
- WHERE pc.PlaylistID = p.PlaylistID
- AND (p.IsSystemPlaylist = 0 OR
- (p.IsSystemPlaylist = 1 AND
- (h.EpisodeID IS NOT NULL OR pod.UserID = %s)))
- ) as episode_count,
- p.IconName,
- p.IsSystemPlaylist
- FROM Playlists p
- WHERE p.PlaylistID = %s AND (p.UserID = %s OR p.IsSystemPlaylist = 1)
- """, (user_id, user_id, playlist_id, user_id))
-
- playlist_info = cursor.fetchone()
-
- if not playlist_info:
- raise Exception(f"Playlist {playlist_id} not found or access denied")
-
- # Handle both tuple and dict formats for playlist info
- is_system_playlist = False
- if isinstance(playlist_info, tuple):
- normalized_info = {
- 'name': playlist_info[0],
- 'description': playlist_info[1],
- 'episode_count': playlist_info[2],
- 'icon_name': playlist_info[3]
- }
- is_system_playlist = playlist_info[4]
- else:
- # Handle both upper and lower case keys
- normalized_info = {
- 'name': playlist_info.get('name') or playlist_info.get('Name'),
- 'description': playlist_info.get('description') or playlist_info.get('Description'),
- 'episode_count': playlist_info.get('episode_count') or playlist_info.get('episode_count'),
- 'icon_name': playlist_info.get('iconname') or playlist_info.get('IconName')
- }
- is_system_playlist = playlist_info.get('issystemplaylist') or playlist_info.get('IsSystemPlaylist')
-
- print(f"Debug - playlist_info type: {type(playlist_info)}")
- print(f"Debug - playlist_info content: {playlist_info}")
- print(f"Debug - normalized playlist info: {normalized_info}")
- print(f"Debug - is_system_playlist: {is_system_playlist}")
-
- # Get playlist settings
- if database_type == "postgresql":
- cursor.execute("""
- SELECT
- IncludeUnplayed,
- IncludePartiallyPlayed,
- IncludePlayed,
- MinDuration,
- MaxDuration,
- SortOrder,
- GroupByPodcast,
- MaxEpisodes,
- PodcastIDs
- FROM "Playlists"
- WHERE PlaylistID = %s AND (UserID = %s OR IsSystemPlaylist = TRUE)
- """, (playlist_id, user_id))
- else: # MySQL
- cursor.execute("""
- SELECT
- IncludeUnplayed,
- IncludePartiallyPlayed,
- IncludePlayed,
- MinDuration,
- MaxDuration,
- SortOrder,
- GroupByPodcast,
- MaxEpisodes,
- PodcastIDs
- FROM Playlists
- WHERE PlaylistID = %s AND (UserID = %s OR IsSystemPlaylist = 1)
- """, (playlist_id, user_id))
-
- playlist_settings = cursor.fetchone()
- if isinstance(playlist_settings, dict):
- # Handle both uppercase and lowercase keys
- settings = [
- playlist_settings.get('includeunplayed', playlist_settings.get('IncludeUnplayed')),
- playlist_settings.get('includepartiallyplayed', playlist_settings.get('IncludePartiallyPlayed')),
- playlist_settings.get('includeplayed', playlist_settings.get('IncludePlayed')),
- playlist_settings.get('minduration', playlist_settings.get('MinDuration')),
- playlist_settings.get('maxduration', playlist_settings.get('MaxDuration')),
- playlist_settings.get('sortorder', playlist_settings.get('SortOrder')),
- playlist_settings.get('groupbypodcast', playlist_settings.get('GroupByPodcast')),
- playlist_settings.get('maxepisodes', playlist_settings.get('MaxEpisodes')),
- playlist_settings.get('podcastids', playlist_settings.get('PodcastIDs'))
- ]
- else: # tuple
- settings = playlist_settings
- print(f"Debug - playlist_settings type: {type(playlist_settings)}")
- print(f"Debug - playlist_settings content: {playlist_settings}")
-
- (include_unplayed, include_partially_played, include_played,
- min_duration, max_duration, sort_order, group_by_podcast,
- max_episodes, podcast_ids) = settings
-
- # Build episode query with appropriate table names for each database
- if database_type == "postgresql":
- query = """
- SELECT DISTINCT
- e.EpisodeID,
- e.EpisodeTitle,
- e.EpisodeDescription,
- e.EpisodeArtwork,
- e.EpisodePubDate,
- e.EpisodeURL,
- e.EpisodeDuration,
- el.ListenDuration as ListenDuration,
- CASE
- WHEN el.ListenDuration >= e.EpisodeDuration THEN TRUE
- ELSE FALSE
- END as Completed,
- es.SaveID IS NOT NULL as Saved,
- eq.QueueID IS NOT NULL as Queued,
- eq.is_youtube as IsYouTube,
- ed.DownloadID IS NOT NULL as Downloaded,
- p.PodcastName
- FROM "PlaylistContents" pc
- JOIN "Episodes" e ON pc.EpisodeID = e.EpisodeID
- JOIN "Podcasts" p ON e.PodcastID = p.PodcastID
- LEFT JOIN "UserEpisodeHistory" el ON e.EpisodeID = el.EpisodeID AND el.UserID = %s
- LEFT JOIN "SavedEpisodes" es ON e.EpisodeID = es.EpisodeID AND es.UserID = %s
- LEFT JOIN "EpisodeQueue" eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = %s
- LEFT JOIN "DownloadedEpisodes" ed ON e.EpisodeID = ed.EpisodeID AND ed.UserID = %s
- WHERE pc.PlaylistID = %s
- AND (p.UserID = %s OR NOT %s)
- """
- else: # MySQL
- query = """
- SELECT DISTINCT
- e.EpisodeID,
- e.EpisodeTitle,
- e.EpisodeDescription,
- e.EpisodeArtwork,
- e.EpisodePubDate,
- e.EpisodeURL,
- e.EpisodeDuration,
- el.ListenDuration as ListenDuration,
- CASE
- WHEN el.ListenDuration >= e.EpisodeDuration THEN 1
- ELSE 0
- END as Completed,
- es.SaveID IS NOT NULL as Saved,
- eq.QueueID IS NOT NULL as Queued,
- eq.is_youtube as IsYouTube,
- ed.DownloadID IS NOT NULL as Downloaded,
- p.PodcastName
- FROM PlaylistContents pc
- JOIN Episodes e ON pc.EpisodeID = e.EpisodeID
- JOIN Podcasts p ON e.PodcastID = p.PodcastID
- LEFT JOIN UserEpisodeHistory el ON e.EpisodeID = el.EpisodeID AND el.UserID = %s
- LEFT JOIN SavedEpisodes es ON e.EpisodeID = es.EpisodeID AND es.UserID = %s
- LEFT JOIN EpisodeQueue eq ON e.EpisodeID = eq.EpisodeID AND eq.UserID = %s
- LEFT JOIN DownloadedEpisodes ed ON e.EpisodeID = ed.EpisodeID AND ed.UserID = %s
- WHERE pc.PlaylistID = %s
- AND (p.UserID = %s OR NOT %s)
- """
- params = [user_id, user_id, user_id, user_id, playlist_id, user_id, is_system_playlist]
-
- # Add sorting logic
- if sort_order == "date_desc":
- query += " ORDER BY e.EpisodePubDate DESC"
- elif sort_order == "date_asc":
- query += " ORDER BY e.EpisodePubDate ASC"
- elif sort_order == "duration_desc":
- query += " ORDER BY e.EpisodeDuration DESC"
- elif sort_order == "duration_asc":
- query += " ORDER BY e.EpisodeDuration ASC"
-
- # Add limit if specified
- if max_episodes:
- query += " LIMIT %s"
- params.append(max_episodes)
-
- print(f"Debug - final query: {query}")
- print(f"Debug - final params: {params}")
-
- cursor.execute(query, tuple(params))
- episodes = cursor.fetchall()
- print(f"Debug - episodes type: {type(episodes)}")
- print(f"Debug - first episode content: {episodes[0] if episodes else None}")
- print(f"Debug - number of episodes: {len(episodes)}")
-
- # Normalize all episodes
- episode_list = []
- for episode in episodes:
- if isinstance(episode, tuple):
- episode_dict = {
- 'episodeid': episode[0],
- 'episodetitle': episode[1],
- 'episodedescription': episode[2],
- 'episodeartwork': episode[3],
- 'episodepubdate': episode[4],
- 'episodeurl': episode[5],
- 'episodeduration': episode[6],
- 'listenduration': episode[7],
- 'completed': bool(episode[8]) if episode[8] is not None else False,
- 'saved': bool(episode[9]) if episode[9] is not None else False,
- 'queued': bool(episode[10]) if episode[10] is not None else False,
- 'is_youtube': bool(episode[11]) if episode[11] is not None else False,
- 'downloaded': bool(episode[12]) if episode[12] is not None else False,
- 'podcastname': episode[13]
- }
- else:
- # Handle both upper and lower case dictionary keys
- episode_dict = {
- 'episodeid': episode.get('episodeid', episode.get('EpisodeID')),
- 'episodetitle': episode.get('episodetitle', episode.get('EpisodeTitle')),
- 'episodedescription': episode.get('episodedescription', episode.get('EpisodeDescription')),
- 'episodeartwork': episode.get('episodeartwork', episode.get('EpisodeArtwork')),
- 'episodepubdate': episode.get('episodepubdate', episode.get('EpisodePubDate')),
- 'episodeurl': episode.get('episodeurl', episode.get('EpisodeURL')),
- 'episodeduration': episode.get('episodeduration', episode.get('EpisodeDuration')),
- 'listenduration': episode.get('listenduration', episode.get('ListenDuration')),
- 'completed': bool(episode.get('completed', episode.get('Completed'))) if episode.get('completed', episode.get('Completed')) is not None else False,
- 'saved': bool(episode.get('saved', episode.get('Saved'))) if episode.get('saved', episode.get('Saved')) is not None else False,
- 'queued': bool(episode.get('queued', episode.get('Queued'))) if episode.get('queued', episode.get('Queued')) is not None else False,
- 'is_youtube': bool(episode.get('isyoutube', episode.get('IsYouTube'))) if episode.get('isyoutube', episode.get('IsYouTube')) is not None else False,
- 'downloaded': bool(episode.get('downloaded', episode.get('Downloaded'))) if episode.get('downloaded', episode.get('Downloaded')) is not None else False,
- 'podcastname': episode.get('podcastname', episode.get('PodcastName'))
- }
- episode_list.append(episode_dict)
-
- # Return directly matching Rust struct - no extra nesting
- return {
- "playlist_info": normalized_info,
- "episodes": episode_list
- }
-
- except Exception as e:
- raise Exception(f"Failed to get playlist episodes: {str(e)}")
- finally:
- cursor.close()
-
-def get_episode_id_by_url_key(database_type, cnx, url_key):
- cursor = cnx.cursor()
-
- query = '''
- SELECT EpisodeID FROM "SharedEpisodes" WHERE UrlKey = %s AND ExpirationDate > NOW()
- ''' if database_type == "postgresql" else '''
- SELECT EpisodeID FROM SharedEpisodes WHERE UrlKey = %s AND ExpirationDate > NOW()
- '''
-
- try:
- cursor.execute(query, (url_key,))
- result = cursor.fetchone()
-
- # Debug: print the result type and value
- print(f"Result: {result}, Type: {type(result)}")
-
- if result:
- # Safely handle result as either tuple or dict
- if isinstance(result, tuple):
- print('tuple')
- episode_id = result[0] # Access tuple
- elif isinstance(result, dict):
- print('dict')
- if database_type == 'postgresql':
- episode_id = result['episodeid'] # Access dict
- else:
- episode_id = result['EpisodeID'] # Access dict
- else:
- episode_id = None # If somehow it's neither, default to None
- else:
- episode_id = None
- print(episode_id)
- cursor.close()
- return episode_id
- except Exception as e:
- print(f"Error retrieving episode by URL key: {e}")
- cursor.close()
- return None
-
-
-
-def add_gpodder_settings(database_type, cnx, user_id, gpodder_url, gpodder_token, login_name, pod_sync_type):
- print("Adding gPodder settings")
- the_key = get_encryption_key(cnx, database_type)
-
- cursor = cnx.cursor()
- from cryptography.fernet import Fernet
-
- encryption_key_bytes = base64.b64decode(the_key)
-
- cipher_suite = Fernet(encryption_key_bytes)
-
- # Only encrypt password if it's not None
- if gpodder_token is not None:
- encrypted_password = cipher_suite.encrypt(gpodder_token.encode())
- # Decode encrypted password back to string
- decoded_token = encrypted_password.decode()
- else:
- decoded_token = None
-
- query = (
- 'UPDATE "Users" SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s' if database_type == "postgresql" else
- "UPDATE Users SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s"
- )
-
- cursor.execute(query, (gpodder_url, login_name, decoded_token, pod_sync_type, user_id))
-
- # Check if the update was successful
- if cursor.rowcount == 0:
- return None
-
- cnx.commit() # Commit changes to the database
- cursor.close()
-
- return True
-
def add_gpodder_server(database_type, cnx, user_id, gpodder_url, gpodder_username, gpodder_password):
    """Store external gPodder sync credentials on the user's row.

    Encrypts ``gpodder_password`` with the server-wide Fernet key before
    writing it to Users.GpodderToken, sets Pod_Sync_Type to "gpodder",
    and commits the change.

    Args:
        database_type: "postgresql" selects quoted identifiers; anything else
            uses the MySQL/MariaDB form.
        cnx: Open DB-API connection.
        user_id: UserID of the row to update.
        gpodder_url: Base URL of the external gPodder server.
        gpodder_username: Login name on that server.
        gpodder_password: Plaintext credential to encrypt, or None to store NULL.

    Returns:
        True on success, None if no row matched ``user_id``.
    """
    print("Adding gPodder settings")
    the_key = get_encryption_key(cnx, database_type)

    from cryptography.fernet import Fernet

    encryption_key_bytes = base64.b64decode(the_key)
    cipher_suite = Fernet(encryption_key_bytes)

    # Only encrypt password if it's not None
    if gpodder_password is not None:
        encrypted_password = cipher_suite.encrypt(gpodder_password.encode())
        # Decode encrypted password back to string for storage in a text column
        decoded_token = encrypted_password.decode()
    else:
        decoded_token = None

    query = (
        'UPDATE "Users" SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s' if database_type == "postgresql" else
        "UPDATE Users SET GpodderUrl = %s, GpodderLoginName = %s, GpodderToken = %s, Pod_Sync_Type = %s WHERE UserID = %s"
    )
    pod_sync_type = "gpodder"

    cursor = cnx.cursor()
    try:
        cursor.execute(query, (gpodder_url, gpodder_username, decoded_token, pod_sync_type, user_id))

        # Check if the update was successful (no matching user -> None,
        # matching the original contract)
        if cursor.rowcount == 0:
            return None

        cnx.commit()  # Commit changes to the database
        return True
    finally:
        # Always release the cursor — the original leaked it on the
        # rowcount == 0 early return and on exceptions.
        cursor.close()
-
-
-
def get_gpodder_settings(database_type, cnx, user_id):
    """Fetch a user's GPodder URL, token, and login as a lowercase-keyed dict.

    Returns {} for an invalid connection object, an unknown user, or any
    database error.
    """
    import logging

    logger = logging.getLogger(__name__)

    # Bail out early when handed something that is not a connection at all.
    if not hasattr(cnx, 'cursor'):
        logger.error(f"Invalid database connection object: {type(cnx)}")
        return {}

    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM "Users" WHERE UserID = %s'
        else:
            query = "SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = %s"
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()

        # Normalise driver-specific row shapes into one dict form.
        if not row:
            settings = {}
        elif isinstance(row, tuple):
            settings = {
                "gpodderurl": row[0],
                "gpoddertoken": row[1],
                "gpodderloginname": row[2],
            }
        elif isinstance(row, dict):
            settings = {key.lower(): value for key, value in row.items()}
        else:
            # Unknown row type: pass it through unchanged (original behavior).
            settings = row

        # Defer to a module-level normaliser when one is defined.
        if 'lowercase_keys' in globals():
            return lowercase_keys(settings)
        return settings
    except Exception as e:
        logger.error(f"Error in get_gpodder_settings: {str(e)}")
        return {}
    finally:
        cursor.close()
-
-
-
-
def get_nextcloud_settings(database_type, cnx, user_id):
    """Return (url, token, login) for the user, or (None, None, None)
    when the user is unknown or any of the three values is missing/empty.
    """
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM "Users" WHERE UserID = %s'
        else:
            query = "SELECT GpodderUrl, GpodderToken, GpodderLoginName FROM Users WHERE UserID = %s"
        cursor.execute(query, (user_id,))
        row = cursor.fetchone()

        if row:
            if isinstance(row, dict):
                # Dict-style rows (PostgreSQL row factories) use lowercase keys.
                url = row.get('gpodderurl')
                token = row.get('gpoddertoken')
                login = row.get('gpodderloginname')
            else:
                # Tuple-style rows index positionally.
                url, token, login = row[0], row[1], row[2]

            # All three values must be truthy for the settings to be usable.
            if url and token and login:
                return url, token, login

        return None, None, None
    finally:
        cursor.close()
-
def get_gpodder_type(cnx, database_type, user_id):
    """Return the user's Pod_Sync_Type value, or None for an unknown user."""
    if database_type == "postgresql":
        query = 'SELECT Pod_Sync_Type FROM "Users" WHERE UserID = %s'
    else:
        query = "SELECT Pod_Sync_Type FROM Users WHERE UserID = %s"

    cursor = cnx.cursor()
    cursor.execute(query, (user_id,))
    row = cursor.fetchone()
    cursor.close()

    if not row:
        return None
    if isinstance(row, dict):
        # Dict rows key by lowercase names on PostgreSQL drivers.
        key = 'pod_sync_type' if database_type == 'postgresql' else 'Pod_Sync_Type'
        return row.get(key)
    if isinstance(row, (list, tuple)):
        return row[0]
    return None
-
-
-
-
def remove_gpodder_settings(database_type, cnx, user_id):
    """Remove GPodder sync settings for a user"""
    import logging
    logger = logging.getLogger(__name__)

    cursor = cnx.cursor()
    try:
        # Device and sync-state rows reference the user, so clear those first.
        if database_type == "postgresql":
            devices_query = 'DELETE FROM "GpodderDevices" WHERE UserID = %s'
            sync_state_query = 'DELETE FROM "GpodderSyncState" WHERE UserID = %s'
        else:
            devices_query = "DELETE FROM GpodderDevices WHERE UserID = %s"
            sync_state_query = "DELETE FROM GpodderSyncState WHERE UserID = %s"

        for cleanup_query in (devices_query, sync_state_query):
            cursor.execute(cleanup_query, (user_id,))

        # Then blank out the sync fields on the user record itself.
        if database_type == "postgresql":
            user_query = '''
            UPDATE "Users"
            SET GpodderUrl = '', GpodderLoginName = '', GpodderToken = '', Pod_Sync_Type = 'None'
            WHERE UserID = %s
            '''
        else:
            user_query = '''
            UPDATE Users
            SET GpodderUrl = '', GpodderLoginName = '', GpodderToken = '', Pod_Sync_Type = 'None'
            WHERE UserID = %s
            '''

        cursor.execute(user_query, (user_id,))
        cnx.commit()
        return True
    except Exception as e:
        logger.error(f"Error removing GPodder settings: {e}")
        cnx.rollback()
        return False
    finally:
        cursor.close()
-
-
-
def check_gpodder_settings(database_type, cnx, user_id):
    """Return True when the user has both a GPodder URL and token configured.

    Handles tuple- and dict-shaped rows. Returns False for unknown users,
    empty values, or unexpected row shapes — the original raised
    UnboundLocalError when the driver returned a row that was neither a
    dict nor a list/tuple, because gpodder_url/gpodder_token were never
    initialized.

    Args:
        database_type: "postgresql" or a MySQL/MariaDB variant.
        cnx: Open DB-API connection.
        user_id: UserID to check.

    Returns:
        bool: True only when both GpodderUrl and GpodderToken are truthy.
    """
    query = (
        'SELECT GpodderUrl, GpodderToken FROM "Users" WHERE UserID = %s' if database_type == "postgresql" else
        "SELECT GpodderUrl, GpodderToken FROM Users WHERE UserID = %s"
    )
    cursor = cnx.cursor()
    try:
        cursor.execute(query, (user_id,))
        result = cursor.fetchone()
    finally:
        # Close even if execute/fetch raises (original leaked on error).
        cursor.close()

    # Initialize defensively so an unexpected row shape yields False
    # instead of UnboundLocalError.
    gpodder_url = None
    gpodder_token = None
    if result:
        if isinstance(result, dict):
            gpodder_url = result.get('gpodderurl' if database_type == 'postgresql' else 'GpodderUrl')
            gpodder_token = result.get('gpoddertoken' if database_type == 'postgresql' else 'GpodderToken')
        elif isinstance(result, (list, tuple)):
            gpodder_url = result[0]
            gpodder_token = result[1]

    # Both pieces are required for sync to be considered configured.
    return bool(gpodder_url and gpodder_token)
-
-
def get_nextcloud_users(database_type, cnx):
    """Return all user rows that participate in gpodder sync.

    A user qualifies either by having complete external credentials
    (URL, token, and login all non-empty) or by opting into the internal
    gpodder API via Pod_Sync_Type ('gpodder' or 'both').
    """
    if database_type == "postgresql":
        query = """
        SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName, Pod_Sync_Type
        FROM "Users"
        WHERE (GpodderUrl <> '' AND GpodderToken <> '' AND GpodderLoginName <> '')
        OR Pod_Sync_Type IN ('gpodder', 'both')
        """
    else:  # MySQL or MariaDB
        query = """
        SELECT UserID, GpodderUrl, GpodderToken, GpodderLoginName, Pod_Sync_Type
        FROM Users
        WHERE (GpodderUrl <> '' AND GpodderToken <> '' AND GpodderLoginName <> '')
        OR Pod_Sync_Type IN ('gpodder', 'both')
        """

    cursor = cnx.cursor()
    cursor.execute(query)
    users = cursor.fetchall()
    cursor.close()
    return users
-
-
-import datetime
-
def current_timestamp():
    """Current UTC time as 'YYYY-MM-DDTHH:MM:SS' — no fractional seconds, no 'Z'."""
    now_utc = datetime.datetime.now(datetime.timezone.utc)
    return now_utc.strftime('%Y-%m-%dT%H:%M:%S')
-
def add_podcast_to_nextcloud(cnx, database_type, gpodder_url, gpodder_login, encrypted_gpodder_token, podcast_url):
    """Subscribe ``podcast_url`` on a Nextcloud gpoddersync server.

    Decrypts the stored Fernet-encrypted token with the server-wide key,
    then POSTs a subscription_change request using HTTP Basic auth.
    Failures are printed, not raised.
    """
    from cryptography.fernet import Fernet
    from requests.auth import HTTPBasicAuth

    # The stored token is Fernet-encrypted with the server-wide key.
    key_bytes = base64.b64decode(get_encryption_key(cnx, database_type))
    cipher_suite = Fernet(key_bytes)

    if encrypted_gpodder_token is not None:
        gpodder_token = cipher_suite.decrypt(encrypted_gpodder_token.encode()).decode()
    else:
        gpodder_token = None

    url = f"{gpodder_url}/index.php/apps/gpoddersync/subscription_change/create"
    auth = HTTPBasicAuth(gpodder_login, gpodder_token)  # Using Basic Auth
    payload = {
        "add": [podcast_url],
        "remove": []
    }
    request_headers = {
        "Content-Type": "application/json"
    }
    response = requests.post(url, json=payload, headers=request_headers, auth=auth)
    try:
        response.raise_for_status()
        print(f"Podcast added to Nextcloud successfully: {response.text}")
    except requests.exceptions.HTTPError as e:
        print(f"Failed to add podcast to Nextcloud: {e}")
        print(f"Response body: {response.text}")
-
def add_podcast_to_opodsync(cnx, database_type, user_id, gpodder_url, gpodder_login, gpodder_token, podcast_url, device_id="default"):
    """Push a new podcast subscription to a gpodder.net-style (oPodSync) server.

    Tries session-based login first, then falls back to plain HTTP Basic auth.
    When the target is the internal API (http://localhost:8042) an
    X-GPodder-Token header is added and, on success, the user's
    PodcastsAdded stat is incremented.

    NOTE(review): the ``user_id`` parameter is immediately shadowed by a
    lookup keyed on ``gpodder_login`` below — confirm callers expect the
    looked-up id to win.

    Returns:
        The server's parsed JSON response on success, or None on failure.
    """
    import requests
    from requests.auth import HTTPBasicAuth
    # Initialize response variable to None
    response = None
    try:
        # Get user ID from gpodder_login
        cursor = cnx.cursor()
        try:
            if database_type == "postgresql":
                query = 'SELECT UserID, GpodderUrl FROM "Users" WHERE Username = %s'
            else:
                query = 'SELECT UserID, GpodderUrl FROM Users WHERE Username = %s'

            cursor.execute(query, (gpodder_login,))
            user_result = cursor.fetchone()

            # Shadows the user_id parameter with the looked-up value (or None).
            user_id = None
            user_gpodder_url = None

            if user_result:
                if isinstance(user_result, dict):
                    user_id = user_result.get('userid')
                    user_gpodder_url = user_result.get('gpodderurl')
                elif isinstance(user_result, tuple):
                    user_id = user_result[0]
                    user_gpodder_url = user_result[1]
        finally:
            cursor.close()

        # Detect if this is the internal API
        is_internal_api = (gpodder_url == "http://localhost:8042")
        # Create auth object - this is used for both session and direct auth
        auth = HTTPBasicAuth(gpodder_login, gpodder_token)
        # Create headers - add special header only for internal API
        headers = {"Content-Type": "application/json"}
        if is_internal_api:
            headers["X-GPodder-Token"] = gpodder_token
            print("Using internal API with X-GPodder-Token header")
        # Prepare request data
        data = {
            "add": [podcast_url],
            "remove": []
        }
        # Try session-based auth first (works with many external servers)
        try:
            session = requests.Session()
            login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json"
            print(f"Attempting session login at: {login_url}")
            login_response = session.post(login_url, auth=auth, headers=headers if is_internal_api else None)
            login_response.raise_for_status()
            print("Session login successful for podcast add")
            # Use the session to add the podcast
            url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json"
            print(f"Sending POST request to: {url}")
            response = session.post(url, json=data, headers=headers)
            response.raise_for_status()
            print(f"Podcast added to oPodSync successfully using session: {response.text}")

            # If this is internal GPodder sync and we have a user ID, update UserStats
            # (best-effort: stats failures are printed but do not fail the add)
            if is_internal_api and user_id is not None:
                try:
                    cursor = cnx.cursor()
                    if database_type == "postgresql":
                        query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s'
                    else:  # MySQL or MariaDB
                        query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s"

                    cursor.execute(query, (user_id,))
                    cnx.commit()
                    print(f"Incremented PodcastsAdded count for user {user_id} in UserStats table")
                except Exception as stats_err:
                    print(f"Error updating UserStats: {stats_err}")
                finally:
                    # NOTE(review): if cnx.cursor() itself raised, `cursor` here
                    # is the earlier (already closed) cursor — confirm harmless.
                    cursor.close()

            return response.json()
        except Exception as e:
            print(f"Session auth failed, trying basic auth: {str(e)}")
            # Fall back to direct basic auth
            url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json"
            print(f"Sending direct POST request to: {url}")
            print(f"Using headers: {headers}")
            print(f"Using auth with username: {gpodder_login}")
            response = requests.post(url, json=data, headers=headers, auth=auth)
            print(f"Response status: {response.status_code}")
            response.raise_for_status()
            print(f"Podcast added to oPodSync successfully with basic auth: {response.text}")

            # If this is internal GPodder sync and we have a user ID, update UserStats
            # (same best-effort stats bump as the session-auth path above)
            if is_internal_api and user_id is not None:
                try:
                    cursor = cnx.cursor()
                    if database_type == "postgresql":
                        query = 'UPDATE "UserStats" SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s'
                    else:  # MySQL or MariaDB
                        query = "UPDATE UserStats SET PodcastsAdded = PodcastsAdded + 1 WHERE UserID = %s"

                    cursor.execute(query, (user_id,))
                    cnx.commit()
                    print(f"Incremented PodcastsAdded count for user {user_id} in UserStats table")
                except Exception as stats_err:
                    print(f"Error updating UserStats: {stats_err}")
                finally:
                    cursor.close()

            return response.json()
    except Exception as e:
        print(f"Failed to add podcast to oPodSync: {e}")
        if response is not None:
            print(f"Response body: {getattr(response, 'text', 'No response object')}")
            print(f"Status code: {getattr(response, 'status_code', 'No status code')}")
            # If there was a server error, try to get more information
            if getattr(response, 'status_code', 0) >= 500:
                print("Server returned an error. Check gpodder API logs for more details.")
        else:
            print("No response received (error occurred before HTTP request)")
        return None
-
def remove_podcast_from_nextcloud(cnx, database_type, gpodder_url, gpodder_login, encrypted_gpodder_token, podcast_url):
    """Unsubscribe ``podcast_url`` on a Nextcloud gpoddersync server.

    Decrypts the stored Fernet-encrypted token with the server-wide key,
    then POSTs a subscription_change request with the feed in the "remove"
    list. Failures are printed, not raised.
    """
    from cryptography.fernet import Fernet
    from requests.auth import HTTPBasicAuth

    # The stored token is Fernet-encrypted with the server-wide key.
    key_bytes = base64.b64decode(get_encryption_key(cnx, database_type))
    cipher_suite = Fernet(key_bytes)

    if encrypted_gpodder_token is not None:
        gpodder_token = cipher_suite.decrypt(encrypted_gpodder_token.encode()).decode()
    else:
        gpodder_token = None

    url = f"{gpodder_url}/index.php/apps/gpoddersync/subscription_change/create"
    auth = HTTPBasicAuth(gpodder_login, gpodder_token)  # Using Basic Auth
    request_headers = {
        "Content-Type": "application/json"
    }
    payload = {
        "add": [],
        "remove": [podcast_url]
    }
    response = requests.post(url, json=payload, headers=request_headers, auth=auth)
    try:
        response.raise_for_status()
        print(f"Podcast removed from Nextcloud successfully: {response.text}")
    except requests.exceptions.HTTPError as e:
        print(f"Failed to remove podcast from Nextcloud: {e}")
        print(f"Response body: {response.text}")
-
-
def remove_podcast_from_opodsync(cnx, database_type, user_id, gpodder_url, gpodder_login, gpodder_token, podcast_url, device_id="default"):
    """Remove a podcast subscription from a gpodder.net-style (oPodSync) server.

    For the internal API (http://localhost:8042) this first deletes the
    podcast and all dependent rows (playlist contents, history, downloads,
    saved, queue, episodes) directly in the local database to avoid
    foreign-key failures, then notifies the API. Session auth is tried
    first, with HTTP Basic auth as the fallback.

    Returns:
        tuple: (success: bool, episodes_handled: bool) where
        ``episodes_handled`` is True only when the internal direct deletion
        committed successfully.
    """
    from requests.auth import HTTPBasicAuth
    import requests
    import traceback
    import mysql.connector
    import psycopg

    # Track if we've handled episode removal internally
    episodes_handled = False
    response = None

    try:
        # Validate required parameters first
        if not gpodder_url or not gpodder_login or not podcast_url:
            error_msg = "Missing required parameters for oPodSync removal"
            print(f"Failed to remove podcast from oPodSync: {error_msg}")
            return False, episodes_handled

        # Check if token is provided
        if gpodder_token is None:
            print("No gpodder token provided")
            return False, episodes_handled

        # Detect if this is the internal API
        is_internal_api = (gpodder_url == "http://localhost:8042")

        # For internal API, handle episode deletion directly to avoid foreign key constraints
        if is_internal_api:
            print("Using internal gPodder API - handling episodes directly")

            # First, get the podcast_id for this feed URL
            cursor = cnx.cursor()
            try:
                if database_type == "postgresql":
                    # PostgreSQL: Quoted table names, unquoted lowercase column names
                    podcast_query = 'SELECT podcastid FROM "Podcasts" WHERE feedurl = %s AND userid = %s'
                else:  # MySQL or MariaDB
                    # MySQL/MariaDB: Unquoted table and column names with proper case
                    podcast_query = 'SELECT PodcastID FROM Podcasts WHERE FeedURL = %s AND UserID = %s'

                cursor.execute(podcast_query, (podcast_url, user_id))
                result = cursor.fetchone()

                podcast_id = None
                if result:
                    # Extract podcast_id based on the result type
                    if isinstance(result, dict):
                        podcast_id = result.get('podcastid') or result.get('PodcastID')
                    else:  # tuple
                        podcast_id = result[0]

                if podcast_id:
                    print(f"Found podcast ID {podcast_id} for URL {podcast_url}")

                    # Now delete all related data to handle the foreign key constraints
                    if database_type == "postgresql":
                        # PostgreSQL: Quoted table names, unquoted lowercase column names
                        delete_playlist_contents = 'DELETE FROM "PlaylistContents" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)'
                        delete_history = 'DELETE FROM "UserEpisodeHistory" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)'
                        delete_downloaded = 'DELETE FROM "DownloadedEpisodes" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)'
                        delete_saved = 'DELETE FROM "SavedEpisodes" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)'
                        delete_queue = 'DELETE FROM "EpisodeQueue" WHERE episodeid IN (SELECT episodeid FROM "Episodes" WHERE podcastid = %s)'
                        delete_episodes = 'DELETE FROM "Episodes" WHERE podcastid = %s'
                        delete_podcast = 'DELETE FROM "Podcasts" WHERE podcastid = %s'
                        update_user_stats = 'UPDATE "UserStats" SET podcastsadded = podcastsadded - 1 WHERE userid = %s'
                    else:  # MySQL or MariaDB
                        # MySQL/MariaDB: Unquoted table and column names with proper case
                        delete_playlist_contents = 'DELETE FROM PlaylistContents WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)'
                        delete_history = 'DELETE FROM UserEpisodeHistory WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)'
                        delete_downloaded = 'DELETE FROM DownloadedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)'
                        delete_saved = 'DELETE FROM SavedEpisodes WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)'
                        delete_queue = 'DELETE FROM EpisodeQueue WHERE EpisodeID IN (SELECT EpisodeID FROM Episodes WHERE PodcastID = %s)'
                        delete_episodes = 'DELETE FROM Episodes WHERE PodcastID = %s'
                        delete_podcast = 'DELETE FROM Podcasts WHERE PodcastID = %s'
                        update_user_stats = 'UPDATE UserStats SET PodcastsAdded = PodcastsAdded - 1 WHERE UserID = %s'

                    # Execute the deletion statements in order
                    # (children before parents so foreign keys are satisfied)
                    try:
                        cursor.execute(delete_playlist_contents, (podcast_id,))
                        print(f"Deleted playlist contents for podcast ID {podcast_id}")

                        cursor.execute(delete_history, (podcast_id,))
                        print(f"Deleted episode history for podcast ID {podcast_id}")

                        cursor.execute(delete_downloaded, (podcast_id,))
                        print(f"Deleted downloaded episodes for podcast ID {podcast_id}")

                        cursor.execute(delete_saved, (podcast_id,))
                        print(f"Deleted saved episodes for podcast ID {podcast_id}")

                        cursor.execute(delete_queue, (podcast_id,))
                        print(f"Deleted queued episodes for podcast ID {podcast_id}")

                        cursor.execute(delete_episodes, (podcast_id,))
                        print(f"Deleted episodes for podcast ID {podcast_id}")

                        cursor.execute(delete_podcast, (podcast_id,))
                        print(f"Deleted podcast with ID {podcast_id}")

                        cursor.execute(update_user_stats, (user_id,))
                        print(f"Updated user stats for user ID {user_id}")

                        cnx.commit()
                        print("All database operations committed successfully")
                        episodes_handled = True
                    except (psycopg.Error, mysql.connector.Error) as db_err:
                        print(f"Database error during podcast deletion: {db_err}")
                        cnx.rollback()
                        # Continue with API call even if direct deletion failed
                else:
                    print(f"Podcast ID not found for URL {podcast_url}")
            except Exception as podcast_error:
                print(f"Error finding podcast ID: {podcast_error}")
            finally:
                cursor.close()

        # Create auth object - this is used for both session and direct auth
        auth = HTTPBasicAuth(gpodder_login, gpodder_token)

        # Create headers - add special header only for internal API
        headers = {"Content-Type": "application/json"}
        if is_internal_api:
            headers["X-GPodder-Token"] = gpodder_token
            print("Using internal API with X-GPodder-Token header")

        # Create a session for cookie-based auth
        session = requests.Session()

        # Try to establish a session first (for PodFetch)
        try:
            login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json"
            print(f"Attempting session login at: {login_url}")
            login_response = session.post(login_url, auth=auth, headers=headers if is_internal_api else None, timeout=10)
            login_response.raise_for_status()
            print("Session login successful for podcast removal")

            # Use the session to remove the podcast
            url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json"
            data = {
                "add": [],
                "remove": [podcast_url]
            }
            print(f"Sending POST request to: {url}")
            response = session.post(url, json=data, headers=headers, timeout=10)
            response.raise_for_status()
            print(f"Podcast removed from oPodSync successfully using session: {response.text}")
            return True, episodes_handled

        except requests.exceptions.RequestException as session_error:
            print(f"Session auth failed, trying basic auth: {str(session_error)}")

            # Fall back to basic auth
            url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_id}.json"
            print(f"Sending direct POST request to: {url}")
            print(f"Using headers: {headers}")
            print(f"Using auth with username: {gpodder_login}")
            data = {
                "add": [],
                "remove": [podcast_url]
            }

            try:
                response = requests.post(url, json=data, headers=headers, auth=auth, timeout=10)
                print(f"Response status: {response.status_code}")
                response.raise_for_status()
                print(f"Podcast removed from oPodSync successfully with basic auth: {response.text}")
                return True, episodes_handled
            except requests.exceptions.RequestException as basic_auth_error:
                print(f"Basic auth removal failed: {str(basic_auth_error)}")
                return False, episodes_handled

    except Exception as e:
        error_details = traceback.format_exc()
        print(f"Failed to remove podcast from oPodSync: {str(e)}\n{error_details}")
        if response is not None:
            print(f"Response body: {getattr(response, 'text', 'No response object')}")
            print(f"Status code: {getattr(response, 'status_code', 'No status code')}")
            # If there was a server error, try to get more information
            if getattr(response, 'status_code', 0) >= 500:
                print("Server returned an error. Check gpodder API logs for more details.")
        else:
            print("No response received (error occurred before HTTP request)")
        return False, episodes_handled
-
-
-
def refresh_nextcloud_subscription(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token, gpodder_login, pod_sync_type):
    """Two-way sync of subscriptions and playback state with a Nextcloud server.

    Phases: (1) decrypt the stored token, (2) fetch Nextcloud subscriptions,
    (3) diff against local podcasts and add/remove locally, (4) report
    successful changes back to Nextcloud, (5) pull remote episode actions,
    (6) push local episode positions. Per-item failures are logged and
    skipped; only a top-level failure re-raises.

    NOTE(review): ``pod_sync_type`` is accepted but never read in this body —
    confirm callers rely on the parameter existing.
    """
    # Set up logging
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    try:
        # Fetch and decrypt token
        encryption_key = get_encryption_key(cnx, database_type)
        encryption_key_bytes = base64.b64decode(encryption_key)
        cipher_suite = Fernet(encryption_key_bytes)

        if encrypted_gpodder_token is not None:
            decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode())
            gpodder_token = decrypted_token_bytes.decode()
        else:
            gpodder_token = None

        auth = HTTPBasicAuth(gpodder_login, gpodder_token)
        logger.info("Starting Nextcloud subscription refresh")

        # Get Nextcloud subscriptions
        response = requests.get(
            f"{gpodder_url}/index.php/apps/gpoddersync/subscriptions",
            auth=auth
        )
        response.raise_for_status()

        nextcloud_podcasts = response.json().get("add", [])
        logger.info(f"Fetched Nextcloud podcasts: {nextcloud_podcasts}")

        # Get local podcasts
        cursor = cnx.cursor()
        if database_type == "postgresql":
            query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s'
        else:
            query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s"

        cursor.execute(query, (user_id,))
        local_podcasts = [row[0] for row in cursor.fetchall()]

        # Set difference in both directions drives the add/remove lists.
        podcasts_to_add = set(nextcloud_podcasts) - set(local_podcasts)
        podcasts_to_remove = set(local_podcasts) - set(nextcloud_podcasts)

        # Track successful operations
        successful_additions = set()
        successful_removals = set()

        # Add new podcasts with individual error handling
        logger.info("Adding new podcasts...")
        for feed_url in podcasts_to_add:
            try:
                podcast_values = get_podcast_values(feed_url, user_id)
                # Default feed cutoff applied to podcasts added via sync.
                feed_cutoff = 30
                return_value = add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff)
                if return_value:
                    logger.info(f"Successfully added {feed_url}")
                    successful_additions.add(feed_url)
                else:
                    logger.error(f"Failed to add {feed_url}")
            except Exception as e:
                logger.error(f"Error processing {feed_url}: {str(e)}")
                continue  # Continue with next podcast even if this one fails

        # Remove podcasts with individual error handling
        logger.info("Removing podcasts...")
        for feed_url in podcasts_to_remove:
            try:
                # remove_podcast needs the podcast name, so look it up first.
                if database_type == "postgresql":
                    query = 'SELECT PodcastName FROM "Podcasts" WHERE FeedURL = %s'
                else:
                    query = "SELECT PodcastName FROM Podcasts WHERE FeedURL = %s"

                cursor.execute(query, (feed_url,))
                result = cursor.fetchone()

                if result:
                    podcast_name = result[0]
                    if remove_podcast(cnx, database_type, podcast_name, feed_url, user_id):
                        successful_removals.add(feed_url)
                        logger.info(f"Successfully removed {feed_url}")
                    else:
                        logger.error(f"Failed to remove {feed_url}")
                else:
                    logger.warning(f"No podcast found with URL: {feed_url}")
            except Exception as e:
                logger.error(f"Error removing {feed_url}: {str(e)}")
                continue

        cnx.commit()
        cursor.close()

        # Sync changes with Nextcloud — only the changes that succeeded locally.
        if successful_additions or successful_removals:
            try:
                sync_subscription_change(
                    gpodder_url,
                    {"Authorization": f"Bearer {gpodder_token}"},
                    list(successful_additions),
                    list(successful_removals)
                )
            except Exception as e:
                logger.error(f"Error syncing changes with Nextcloud: {str(e)}")

        # Process episode actions
        try:
            process_nextcloud_episode_actions(gpodder_url, gpodder_token, cnx, database_type, user_id)
        except Exception as e:
            logger.error(f"Error processing episode actions: {str(e)}")

        # Sync local episode times
        try:
            sync_nextcloud_episode_times(gpodder_url, gpodder_login, gpodder_token, cnx, database_type, user_id)
        except Exception as e:
            logger.error(f"Error syncing local episode times: {str(e)}")

    except Exception as e:
        logger.error(f"Major error in refresh_nextcloud_subscription: {str(e)}")
        raise
-
def process_nextcloud_episode_actions(gpodder_url, gpodder_token, cnx, database_type, user_id):
    """Pull episode actions from Nextcloud and apply play positions locally.

    For each "play"/"update_time" action with a valid position, records the
    listen duration; when position >= total, also marks the episode
    completed. Per-action failures are logged and skipped; a failure to
    fetch the action list re-raises.
    """
    logger = logging.getLogger(__name__)

    try:
        # Use the correct Nextcloud endpoint
        response = requests.get(
            f"{gpodder_url}/index.php/apps/gpoddersync/episode_action",
            headers={"Authorization": f"Bearer {gpodder_token}"}
        )
        response.raise_for_status()
        episode_actions = response.json()

        cursor = cnx.cursor()

        for action in episode_actions.get('actions', []):
            try:
                if action["action"].lower() in ["play", "update_time"]:
                    # Position -1 means "no position reported"; skip those.
                    if "position" in action and action["position"] != -1:
                        episode_id = get_episode_id_by_url(cnx, database_type, action["episode"])
                        if episode_id:
                            # Update listen duration
                            record_listen_duration(cnx, database_type, episode_id, user_id, int(action["position"]))

                            # Check for completion, mirroring gPodder logic
                            if ("total" in action and action["total"] > 0 and
                                action["position"] >= action["total"]):
                                if database_type == "postgresql":
                                    update_query = '''
                                        UPDATE "Episodes"
                                        SET Completed = TRUE
                                        WHERE EpisodeID = %s
                                    '''
                                else:
                                    update_query = '''
                                        UPDATE Episodes
                                        SET Completed = TRUE
                                        WHERE EpisodeID = %s
                                    '''
                                cursor.execute(update_query, (episode_id,))
                                cnx.commit()
                                logger.info(f"Marked episode {episode_id} as completed")

                            logger.info(f"Recorded listen duration for episode {episode_id}")
                        else:
                            logger.warning(f"No episode ID found for URL {action['episode']}")
            except Exception as e:
                logger.error(f"Error processing episode action {action}: {str(e)}")
                continue

        cursor.close()
    except Exception as e:
        logger.error(f"Error fetching episode actions: {str(e)}")
        raise
-
def sync_nextcloud_episode_times(gpodder_url, gpodder_login, gpodder_token, cnx, database_type, user_id, UPLOAD_BULK_SIZE=30):
    """Push local listen positions to Nextcloud as "play" episode actions.

    Builds one action per episode that has both a duration and a listen
    position (completed episodes report position == total), then uploads
    them in chunks of ``UPLOAD_BULK_SIZE``. Failed chunks are logged and
    skipped; an error before chunking re-raises.
    """
    logger = logging.getLogger(__name__)

    try:
        local_episode_times = get_local_episode_times(cnx, database_type, user_id)
        update_actions = []

        for episode_time in local_episode_times:
            # Only include episodes with valid duration data
            if episode_time["episode_duration"] and episode_time["listen_duration"]:
                # If episode is completed, set position equal to total duration
                position = (episode_time["episode_duration"]
                            if episode_time["completed"]
                            else episode_time["listen_duration"])

                action = {
                    "podcast": episode_time["podcast_url"],
                    "episode": episode_time["episode_url"],
                    "action": "play",
                    "timestamp": current_timestamp(),
                    "position": position,
                    "started": 0,
                    "total": episode_time["episode_duration"],
                    "guid": generate_guid(episode_time)
                }
                update_actions.append(action)

        # Split into chunks and process
        update_actions_chunks = [
            update_actions[i:i + UPLOAD_BULK_SIZE]
            for i in range(0, len(update_actions), UPLOAD_BULK_SIZE)
        ]

        from urllib.parse import urljoin
        for chunk in update_actions_chunks:
            try:
                url = urljoin(gpodder_url, "/index.php/apps/gpoddersync/episode_action/create")
                response = requests.post(
                    url,
                    json=chunk,
                    auth=HTTPBasicAuth(gpodder_login, gpodder_token),
                    headers={"Accept": "application/json"}
                )
                response.raise_for_status()
                logger.info(f"Successfully uploaded chunk of {len(chunk)} episode times")
            except Exception as e:
                logger.error(f"Error uploading chunk: {str(e)}")
                continue

    except Exception as e:
        logger.error(f"Error syncing local episode times: {str(e)}")
        raise
-
def get_user_devices(cnx, database_type, user_id):
    """Get all GPodder devices for a user with proper datetime conversion"""
    import logging
    logger = logging.getLogger(__name__)

    if database_type == "postgresql":
        query = '''
            SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive, IsDefault
            FROM "GpodderDevices"
            WHERE UserID = %s
        '''
    else:
        query = '''
            SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive, IsDefault
            FROM GpodderDevices
            WHERE UserID = %s
        '''

    cursor = cnx.cursor()
    try:
        cursor.execute(query, (user_id,))
        devices = []
        for row in cursor.fetchall():
            if isinstance(row, dict):
                # Dict-style row (driver dependent): lowercase keys,
                # timestamps serialised to ISO strings.
                when = row["lastsync"]
                devices.append({
                    "id": row["deviceid"],
                    "name": row["devicename"],
                    "type": row["devicetype"],
                    "caption": row["devicecaption"],
                    "last_sync": when.isoformat() if when else None,
                    "is_active": row["isactive"],
                    "is_remote": False,
                    "is_default": row["isdefault"],
                })
            else:
                # Tuple-style row; the IsDefault column may be absent
                # on shorter rows, defaulting to False.
                when = row[4]
                devices.append({
                    "id": row[0],
                    "name": row[1],
                    "type": row[2],
                    "caption": row[3],
                    "last_sync": when.isoformat() if when else None,
                    "is_active": row[5],
                    "is_remote": False,
                    "is_default": row[6] if len(row) > 6 else False,
                })
        return devices
    except Exception as e:
        logger.error(f"Error getting user devices: {e}")
        return []
    finally:
        cursor.close()
-
-# Add this to your database_functions/functions.py file
-
def handle_remote_device(cnx, database_type, user_id, device_name):
    """
    Set a remote GPodder device (one with a negative ID on the server) as the
    default by reusing an existing local record or creating a new local
    representation of it.

    Args:
        cnx: Database connection
        database_type: Type of database ('postgresql' or other)
        user_id: User ID
        device_name: Name of the remote device

    Returns:
        tuple: (success: bool, message: str, device_id: int)
    """
    import logging
    logger = logging.getLogger(__name__)

    try:
        # Reuse an existing local record when one matches by name.
        existing_id = find_device_by_name(cnx, database_type, user_id, device_name)
        if existing_id:
            logger.info(f"Found existing device with name {device_name}, ID: {existing_id}")
            ok = set_default_gpodder_device(cnx, database_type, user_id, existing_id)
            return (ok, "Existing device set as default", existing_id)

        # No local record yet — create one typed as "remote".
        new_device_id = create_or_update_device(
            cnx,
            database_type,
            user_id,
            device_name,
            "remote",  # Type for remote devices
            f"Remote device from GPodder server"
        )
        if not new_device_id:
            logger.error("Failed to create device for remote device")
            return (False, "Failed to create local representation of remote device", None)

        ok = set_default_gpodder_device(cnx, database_type, user_id, new_device_id)
        return (ok, "Remote device created and set as default", new_device_id)

    except Exception as e:
        logger.error(f"Error handling remote device: {e}")
        return (False, f"Error: {str(e)}", None)
-
-
def find_device_by_name(cnx, database_type, user_id, device_name):
    """
    Find a GPodder device by name for a specific user.

    Args:
        cnx: Database connection
        database_type: Type of database ('postgresql' or MySQL/MariaDB)
        user_id: User ID
        device_name: Device name to find

    Returns:
        int: Device ID, or None when not found or on error.
    """
    # Acquire the cursor before the try block so the finally clause can never
    # reference an unbound name — the original raised NameError in `finally`
    # whenever cnx.cursor() itself failed.
    cursor = cnx.cursor()
    try:
        if database_type == "postgresql":
            query = 'SELECT DeviceID FROM "GpodderDevices" WHERE UserID = %s AND DeviceName = %s'
        else:
            query = 'SELECT DeviceID FROM GpodderDevices WHERE UserID = %s AND DeviceName = %s'

        cursor.execute(query, (user_id, device_name))
        result = cursor.fetchone()

        if result:
            # Tuple rows index positionally; dict rows use lowercase keys.
            if isinstance(result, tuple):
                return result[0]
            else:
                return result["deviceid"]
        return None
    except Exception as e:
        print(f"Error finding device by name: {e}")
        return None
    finally:
        cursor.close()
-
-def create_or_update_device(cnx, database_type, user_id, device_name, device_type="desktop", device_caption=None, is_default=False):
- """
- Creates a new device or updates an existing one.
- If is_default is True, this device will be set as the default.
- """
- try:
- cursor = cnx.cursor()
-
- # Check if device exists
- if database_type == "postgresql":
- query = """
- SELECT DeviceID FROM "GpodderDevices"
- WHERE UserID = %s AND DeviceName = %s
- """
- else:
- query = """
- SELECT DeviceID FROM GpodderDevices
- WHERE UserID = %s AND DeviceName = %s
- """
-
- cursor.execute(query, (user_id, device_name))
- result = cursor.fetchone()
-
- if result:
- # Device exists, update it
- device_id = result[0] if isinstance(result, tuple) else result["deviceid"]
-
- if database_type == "postgresql":
- query = """
- UPDATE "GpodderDevices"
- SET DeviceType = %s, DeviceCaption = %s, LastSync = CURRENT_TIMESTAMP
- WHERE DeviceID = %s
- """
- else:
- query = """
- UPDATE GpodderDevices
- SET DeviceType = %s, DeviceCaption = %s, LastSync = CURRENT_TIMESTAMP
- WHERE DeviceID = %s
- """
-
- cursor.execute(query, (device_type, device_caption, device_id))
-
- # If this should be the default device, set it
- if is_default:
- set_default_gpodder_device(cnx, database_type, user_id, device_id)
-
- cnx.commit()
- return device_id
- else:
- # Device doesn't exist, create it
- if database_type == "postgresql":
- query = """
- INSERT INTO "GpodderDevices" (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault)
- VALUES (%s, %s, %s, %s, %s)
- RETURNING DeviceID
- """
- else:
- query = """
- INSERT INTO GpodderDevices (UserID, DeviceName, DeviceType, DeviceCaption, IsDefault)
- VALUES (%s, %s, %s, %s, %s)
- """
-
- # If this is the first device for the user, make it the default
- if is_default:
- cursor.execute(query, (user_id, device_name, device_type, device_caption, True))
- else:
- # Check if this is the first device
- if database_type == "postgresql":
- count_query = 'SELECT COUNT(*) as count FROM "GpodderDevices" WHERE UserID = %s'
- else:
- count_query = 'SELECT COUNT(*) as count FROM GpodderDevices WHERE UserID = %s'
-
- cursor.execute(count_query, (user_id,))
- result = cursor.fetchone()
-
- # Handle different result formats from different database types
- if result is None:
- count = 0
- elif isinstance(result, tuple):
- count = result[0]
- elif isinstance(result, dict) and "count" in result:
- count = result["count"]
- else:
- # Try to get value safely
- try:
- count = list(result.values())[0] if result else 0
- except:
- count = 0
-
- # If this is the first device, make it the default
- is_first_device = count == 0
- cursor.execute(query, (user_id, device_name, device_type, device_caption, is_first_device))
-
- if database_type == "postgresql":
- result = cursor.fetchone()
- device_id = result[0] if result and isinstance(result, tuple) else (result['deviceid'] if result else None)
- else:
- device_id = cursor.lastrowid
-
- cnx.commit()
- return device_id
- except Exception as e:
- print(f"Error creating/updating device: {e}")
- cnx.rollback()
- return None
- finally:
- cursor.close()
-
-def get_sync_timestamps(cnx, database_type, user_id, device_id):
- """Get sync timestamps for a device, with default values if not found"""
- try:
- cursor = cnx.cursor()
- # Handle negative device IDs (remote devices)
- if device_id and device_id < 0:
- print(f"Error getting sync timestamps: Device ID {device_id} is negative (remote device)")
- # Return default timestamps for remote devices
- return {"last_timestamp": 0, "episodes_timestamp": 0}
- if database_type == "postgresql":
- query = '''
- SELECT LastTimestamp, EpisodesTimestamp
- FROM "GpodderSyncState"
- WHERE UserID = %s AND DeviceID = %s
- '''
- else:
- query = '''
- SELECT LastTimestamp, EpisodesTimestamp
- FROM GpodderSyncState
- WHERE UserID = %s AND DeviceID = %s
- '''
- cursor.execute(query, (user_id, device_id))
- result = cursor.fetchone()
- if result:
- if isinstance(result, tuple):
- return {
- "last_timestamp": result[0] or 0,
- "episodes_timestamp": result[1] or 0
- }
- else:
- return {
- "last_timestamp": result.get("lasttimestamp", 0) or 0,
- "episodes_timestamp": result.get("episodestimestamp", 0) or 0
- }
- else:
- # No timestamps found, create default record
- if database_type == "postgresql":
- insert_query = '''
- INSERT INTO "GpodderSyncState" (UserID, DeviceID, LastTimestamp, EpisodesTimestamp)
- VALUES (%s, %s, 0, 0)
- ON CONFLICT (UserID, DeviceID) DO NOTHING
- '''
- else:
- # For MySQL, use INSERT IGNORE instead of ON CONFLICT
- insert_query = '''
- INSERT IGNORE INTO GpodderSyncState (UserID, DeviceID, LastTimestamp, EpisodesTimestamp)
- VALUES (%s, %s, 0, 0)
- '''
- try:
- cursor.execute(insert_query, (user_id, device_id))
- cnx.commit()
- except Exception as e:
- print(f"Error creating sync timestamps: {e}")
- # Don't let this error abort everything
- cnx.rollback()
- return {"last_timestamp": 0, "episodes_timestamp": 0}
- except Exception as e:
- print(f"Error getting sync timestamps: {e}")
- return {"last_timestamp": 0, "episodes_timestamp": 0}
- finally:
- cursor.close()
-
-def update_sync_timestamp(cnx, database_type, user_id, device_id, timestamp_type, new_timestamp):
- """Update the sync timestamp for a particular user and device"""
- if timestamp_type not in ["last_timestamp", "episodes_timestamp"]:
- raise ValueError("Invalid timestamp_type. Must be 'last_timestamp' or 'episodes_timestamp'")
-
- cursor = cnx.cursor()
- try:
- db_column = "LastTimestamp" if timestamp_type == "last_timestamp" else "EpisodesTimestamp"
-
- if database_type == "postgresql":
- query = f'''
- UPDATE "GpodderSyncState"
- SET {db_column} = %s
- WHERE UserID = %s AND DeviceID = %s
- '''
- else:
- query = f'''
- UPDATE GpodderSyncState
- SET {db_column} = %s
- WHERE UserID = %s AND DeviceID = %s
- '''
-
- cursor.execute(query, (new_timestamp, user_id, device_id))
- cnx.commit()
- return True
- except Exception as e:
- print(f"Error updating sync timestamp: {e}")
- cnx.rollback()
- return False
- finally:
- cursor.close()
-
-def get_or_create_default_device(cnx, database_type, user_id):
- """Get the default device for a user or create it if it doesn't exist"""
- default_device_name = "pinepods_default"
-
- # Try to find existing default device
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = '''
- SELECT DeviceID FROM "GpodderDevices"
- WHERE UserID = %s AND DeviceName = %s
- '''
- else:
- query = '''
- SELECT DeviceID FROM GpodderDevices
- WHERE UserID = %s AND DeviceName = %s
- '''
-
- cursor.execute(query, (user_id, default_device_name))
- result = cursor.fetchone()
-
- if result:
- # Default device exists
- return result[0] if isinstance(result, tuple) else result["deviceid"]
- else:
- # Create default device
- return create_or_update_device(
- cnx,
- database_type,
- user_id,
- default_device_name,
- "desktop",
- "Pinepods Default Device"
- )
- except Exception as e:
- logger.error(f"Error getting/creating default device: {e}")
- return None
- finally:
- cursor.close()
-
-def get_current_timestamp():
- """Get current timestamp in format expected by gpodder API"""
- return int(time.time())
-
-
-def create_or_get_gpodder_device(cnx, database_type, user_id, device_name, device_type, device_caption):
- """
- Create a gpodder device if it doesn't exist, or get its ID if it does
-
- Args:
- cnx: Database connection
- database_type: Type of database (postgresql or mysql)
- user_id: User ID
- device_name: Device name
- device_type: Device type (server, desktop, mobile, etc.)
- device_caption: Human-readable device caption
-
- Returns:
- Device ID if successful, None if failed
- """
- try:
- cursor = cnx.cursor()
-
- # Check if device exists
- if database_type == "postgresql":
- query = 'SELECT DeviceID FROM "GpodderDevices" WHERE UserID = %s AND DeviceName = %s'
- else:
- query = "SELECT DeviceID FROM GpodderDevices WHERE UserID = %s AND DeviceName = %s"
-
- cursor.execute(query, (user_id, device_name))
- device_result = cursor.fetchone()
-
- if device_result:
- # Device exists, return its ID
- if isinstance(device_result, tuple):
- device_id = device_result[0]
- else:
- # For dict result, use the correct column name case
- device_id = device_result["DeviceID"]
- print(f"Using existing gpodder device with ID: {device_id}")
- else:
- # Create device record
- if database_type == "postgresql":
- query = '''
- INSERT INTO "GpodderDevices"
- (UserID, DeviceName, DeviceType, DeviceCaption, IsActive, LastSync)
- VALUES (%s, %s, %s, %s, TRUE, CURRENT_TIMESTAMP)
- RETURNING DeviceID
- '''
- else:
- query = '''
- INSERT INTO GpodderDevices
- (UserID, DeviceName, DeviceType, DeviceCaption, IsActive, LastSync)
- VALUES (%s, %s, %s, %s, TRUE, NOW())
- '''
-
- cursor.execute(query, (user_id, device_name, device_type, device_caption))
-
- if database_type == "postgresql":
- device_id = cursor.fetchone()[0]
- else:
- device_id = cursor.lastrowid
-
- print(f"Created gpodder device with ID: {device_id}")
-
- # Also create device sync state entry
- if database_type == "postgresql":
- state_query = '''
- INSERT INTO "GpodderSyncDeviceState" (UserID, DeviceID)
- VALUES (%s, %s)
- ON CONFLICT (UserID, DeviceID) DO NOTHING
- '''
- else:
- state_query = '''
- INSERT IGNORE INTO GpodderSyncDeviceState (UserID, DeviceID)
- VALUES (%s, %s)
- '''
-
- cursor.execute(state_query, (user_id, device_id))
-
- cnx.commit()
- cursor.close()
- return device_id
-
- except Exception as e:
- print(f"Error in create_or_get_gpodder_device: {e}")
- if 'cursor' in locals():
- cursor.close()
- return None
-
-def generate_secure_token(length=64):
- """
- Generate a secure random token for internal authentication
-
- Args:
- length: Length of the token (default: 64)
-
- Returns:
- Secure random token string
- """
- import secrets
- import string
-
- alphabet = string.ascii_letters + string.digits
- return ''.join(secrets.choice(alphabet) for _ in range(length))
-
-def set_gpodder_internal_sync(cnx, database_type, user_id):
- """
- Set up internal gpodder sync for a user with a plain, unencrypted token
- """
- try:
- # Get the username
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT Username, Pod_Sync_Type FROM "Users" WHERE UserID = %s'
- else:
- query = "SELECT Username, Pod_Sync_Type FROM Users WHERE UserID = %s"
- cursor.execute(query, (user_id,))
- user_info = cursor.fetchone()
- cursor.close()
- if not user_info:
- print(f"User not found for ID: {user_id}")
- return None
- username = user_info[0] if isinstance(user_info, tuple) else user_info["username"]
- current_sync_type = user_info[1] if isinstance(user_info, tuple) else user_info["pod_sync_type"]
-
- # Generate a new sync type based on current
- new_sync_type = current_sync_type
- if current_sync_type == "external":
- new_sync_type = "both"
- elif current_sync_type == "None" or current_sync_type is None:
- new_sync_type = "gpodder"
-
- # Generate a secure internal token - PLAIN TEXT, NO ENCRYPTION
- import secrets
- import string
- alphabet = string.ascii_letters + string.digits
- internal_token = ''.join(secrets.choice(alphabet) for _ in range(64))
-
- # Set up the local gpodder API details
- local_gpodder_url = "http://localhost:8042" # Internal API URL
-
- # Store the plain token in the database
- if database_type == "postgresql":
- query = '''
- UPDATE "Users"
- SET GpodderUrl = %s, GpodderToken = %s, GpodderLoginName = %s, Pod_Sync_Type = %s
- WHERE UserID = %s
- '''
- else:
- query = '''
- UPDATE Users
- SET GpodderUrl = %s, GpodderToken = %s, GpodderLoginName = %s, Pod_Sync_Type = %s
- WHERE UserID = %s
- '''
- cursor = cnx.cursor()
- cursor.execute(query, (local_gpodder_url, internal_token, username, new_sync_type, user_id))
- cnx.commit()
- cursor.close()
-
- # Create a default device for this user using the gPodder API
- default_device_name = f"pinepods-internal-{user_id}"
-
- # Create the device using the gPodder API
- import requests
- from requests.auth import HTTPBasicAuth
-
- # Use the API to register a device
- device_data = {
- "caption": f"PinePods Internal Device {user_id}",
- "type": "server"
- }
-
- try:
- # First, check if the device already exists
- device_list_url = f"{local_gpodder_url}/api/2/devices/{username}.json"
- response = requests.get(
- device_list_url,
- auth=HTTPBasicAuth(username, internal_token)
- )
-
- # If we can't get device list, create a new one anyway
- existing_device_id = None
- if response.status_code == 200:
- devices = response.json()
- for device in devices:
- if device.get("id") == default_device_name:
- existing_device_id = device.get("id")
- print(f"Found existing device with ID: {existing_device_id}")
- break
-
- # If device doesn't exist, create it
- if not existing_device_id:
- device_url = f"{local_gpodder_url}/api/2/devices/{username}/{default_device_name}.json"
- response = requests.post(
- device_url,
- json=device_data,
- auth=HTTPBasicAuth(username, internal_token)
- )
-
- if response.status_code in [200, 201]:
- print(f"Created device with ID: {default_device_name}")
- else:
- print(f"Failed to create device: {response.status_code} - {response.text}")
- # Continue anyway - the API might create the device on first sync
-
- # Return the device info
- return {
- "device_name": default_device_name,
- "device_id": user_id, # Use user_id as a fallback/reference
- "success": True
- }
-
- except Exception as device_err:
- print(f"Error creating device via API: {device_err}")
- # Even if device creation fails, still return success
- return {
- "device_name": default_device_name,
- "device_id": user_id,
- "success": True
- }
-
- except Exception as e:
- print(f"Error in set_gpodder_internal_sync: {e}")
- return None
-
-def disable_gpodder_internal_sync(cnx, database_type, user_id):
- """
- Disable internal gpodder sync for a user
-
- Args:
- cnx: Database connection
- database_type: Type of database (postgresql or mysql)
- user_id: User ID
-
- Returns:
- True if successful, False if failed
- """
- try:
- # Get current gpodder settings
- user_data = get_user_gpodder_status(cnx, database_type, user_id)
- if not user_data:
- print(f"User data not found for ID: {user_id}")
- return False
-
- current_sync_type = user_data["sync_type"]
-
- # Determine new sync type
- new_sync_type = current_sync_type
- if current_sync_type == "both":
- new_sync_type = "external"
- elif current_sync_type == "gpodder":
- new_sync_type = "None"
-
- # If internal API is being used, clear the settings
- if user_data.get("gpodder_url") == "http://localhost:8042":
- success = add_gpodder_settings(
- database_type,
- cnx,
- user_id,
- "", # Clear URL
- "", # Clear token
- "", # Clear login
- new_sync_type
- )
-
- if not success:
- print(f"Failed to clear gpodder settings for user: {user_id}")
- return False
- else:
- # Just update the sync type
- success = update_user_gpodder_sync(cnx, database_type, user_id, new_sync_type)
- if not success:
- print(f"Failed to update gpodder sync type for user: {user_id}")
- return False
-
- return True
-
- except Exception as e:
- print(f"Error in disable_gpodder_internal_sync: {e}")
- return False
-
-def refresh_gpodder_subscription(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token,
- gpodder_login, pod_sync_type, device_id=None, device_name=None, is_remote=False):
- """Refreshes podcasts from GPodder with proper device handling"""
- from cryptography.fernet import Fernet
- import logging
- import requests
- import base64
- from requests.auth import HTTPBasicAuth
-
- # Set up logging
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)
-
- try:
- # More detailed logging for debugging
- print(f"Starting refresh with parameters: user_id={user_id}, gpodder_url={gpodder_url}, " +
- f"pod_sync_type={pod_sync_type}, device_id={device_id}, device_name={device_name}, " +
- f"is_remote={is_remote}")
-
- # Flag to identify internal API calls
- is_internal_api = (gpodder_url == "http://localhost:8042")
- print(f"Is internal API: {is_internal_api}")
-
- # Determine which device to use for GPodder API calls
- actual_device_name = None
-
- # Handle device name/id logic
- if is_remote and device_name:
- # If it's a remote device, use the provided device name directly
- print(f"Using remote device name: {device_name}")
-
- # Create a local representation of the remote device
- success, message, local_device_id = handle_remote_device(cnx, database_type, user_id, device_name)
- if success:
- print(f"Created/found local device for remote device: {local_device_id}")
- # Use the local device ID instead of -1
- device_id = local_device_id
- actual_device_name = device_name
- else:
- print(f"Failed to handle remote device: {message}")
- # Proceed with just the name, but device_id will still be -1 which might cause problems
- actual_device_name = device_name
- elif device_id:
- # If a specific device ID is provided, look it up in the database
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s'
- else:
- query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s"
- cursor.execute(query, (device_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if result:
- actual_device_name = result[0] if isinstance(result, tuple) else result["devicename"]
- logger.info(f"Using device from database: {actual_device_name} (ID: {device_id})")
- else:
- logger.warning(f"Device ID {device_id} not found in database, falling back to default")
- default_device = get_default_gpodder_device(cnx, database_type, user_id)
- if default_device:
- device_id = default_device["id"]
- actual_device_name = default_device["name"]
- print(f"Using default device: {actual_device_name} (ID: {device_id})")
- else:
- # No default device, create one with proper naming pattern
- if is_internal_api:
- # Use internal device naming pattern
- device_name_to_use = f"pinepods-internal-{user_id}"
- device_type_to_use = "server"
- device_caption_to_use = f"PinePods Internal Device {user_id}"
- else:
- # Use default device pattern for external gPodder
- device_name_to_use = "pinepods_default"
- device_type_to_use = "desktop"
- device_caption_to_use = "Pinepods Default Device"
-
- device_id = create_or_update_device(
- cnx,
- database_type,
- user_id,
- device_name_to_use,
- device_type_to_use,
- device_caption_to_use,
- True # Set as default
- )
- actual_device_name = device_name_to_use
- print(f"Created new default device: {actual_device_name} (ID: {device_id})")
- else:
- # No device specified, use default
- default_device = get_default_gpodder_device(cnx, database_type, user_id)
- if default_device:
- device_id = default_device["id"]
- actual_device_name = default_device["name"]
- print(f"Using default device: {actual_device_name} (ID: {device_id})")
- else:
- # No devices exist, create a default one with proper naming pattern
- if is_internal_api:
- # Use internal device naming pattern
- device_name_to_use = f"pinepods-internal-{user_id}"
- device_type_to_use = "server"
- device_caption_to_use = f"PinePods Internal Device {user_id}"
- else:
- # Use default device pattern for external gPodder
- device_name_to_use = "pinepods_default"
- device_type_to_use = "desktop"
- device_caption_to_use = "Pinepods Default Device"
-
- device_id = create_or_update_device(
- cnx,
- database_type,
- user_id,
- device_name_to_use,
- device_type_to_use,
- device_caption_to_use,
- True # Set as default
- )
- actual_device_name = device_name_to_use
- print(f"Created new default device: {actual_device_name} (ID: {device_id})")
-
- # For remote devices, we might need to skip checking local timestamps
- # and force a full sync from the GPodder server
- if is_remote:
- # Force a full sync by setting timestamp to 0
- timestamps = {"last_timestamp": 0}
- print("Remote device selected - forcing full sync with timestamp 0")
- else:
- # Get sync timestamps for local device
- timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id)
-
- # Get encryption key and decrypt the GPodder token
- print("Getting encryption key...")
- encryption_key = get_encryption_key(cnx, database_type)
-
- if not encryption_key:
- logger.error("Failed to retrieve encryption key")
- return False
-
- try:
- encryption_key_bytes = base64.b64decode(encryption_key)
- cipher_suite = Fernet(encryption_key_bytes)
- except Exception as e:
- logger.error(f"Error preparing encryption key: {str(e)}")
- return False
-
- # Special handling for encrypted_gpodder_token based on input type
- if isinstance(encrypted_gpodder_token, dict):
- if "data" in encrypted_gpodder_token:
- print("Extracting token from dictionary input")
- encrypted_gpodder_token = encrypted_gpodder_token.get("data", {}).get("gpoddertoken", "")
- else:
- encrypted_gpodder_token = encrypted_gpodder_token.get("gpoddertoken", "")
-
- # Decrypt the token - with improved error handling
- gpodder_token = None
- if encrypted_gpodder_token is not None and encrypted_gpodder_token != "":
- try:
- # Handle both string and bytes formats
- if isinstance(encrypted_gpodder_token, bytes):
- decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token)
- else:
- # Make sure we're working with a valid token
- token_to_decrypt = encrypted_gpodder_token
- # If the token isn't in the right format for decryption, try to fix it
- if not (token_to_decrypt.startswith(b'gAAAAA') if isinstance(token_to_decrypt, bytes)
- else token_to_decrypt.startswith('gAAAAA')):
- gpodder_token = encrypted_gpodder_token
- else:
- decrypted_token_bytes = cipher_suite.decrypt(token_to_decrypt.encode())
- gpodder_token = decrypted_token_bytes.decode()
- except Exception as e:
- logger.error(f"Error decrypting token: {str(e)}")
- # For non-internal servers, we might still want to continue with whatever token we have
- if is_internal_api:
- # For internal server, fall back to using the raw token if decryption fails
- gpodder_token = encrypted_gpodder_token
- else:
- # For external servers, continue with the encrypted token
- gpodder_token = encrypted_gpodder_token
- else:
- logger.warning("No token provided")
- if is_internal_api:
- logger.error("Token required for internal gpodder server")
- return False
-
- print(f"Using {'internal' if is_internal_api else 'external'} gpodder API at {gpodder_url}")
-
- # Create a session for cookie-based auth
- session = requests.Session()
-
- # Handle authentication for internal API calls
- if is_internal_api:
- print("Using token-based auth for internal API")
- # Use the token directly with the gPodder API
- auth = HTTPBasicAuth(gpodder_login, encrypted_gpodder_token)
-
- # Try to access API using Basic Auth
- try:
- # First, create or update the device if needed
- device_data = {
- "caption": f"PinePods Internal Device {user_id}",
- "type": "server"
- }
- device_url = f"{gpodder_url}/api/2/devices/{gpodder_login}/{actual_device_name}.json"
-
- try:
- response = requests.post(
- device_url,
- json=device_data,
- auth=auth
- )
- if response.status_code in [200, 201]:
- print(f"Updated device: {actual_device_name}")
- else:
- print(f"Note: Device update returned {response.status_code}")
- except Exception as device_err:
- print(f"Warning: Device update failed: {device_err}")
- # Continue anyway
-
- # Now get subscriptions
- subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}"
- print(f"Requesting subscriptions from internal API at {subscription_url}")
- response = requests.get(subscription_url, auth=auth)
- response.raise_for_status()
- gpodder_data = response.json()
- print("Successfully retrieved data from internal API")
- use_session = False
- except Exception as e:
- logger.error(f"Failed to get subscriptions from internal API: {str(e)}")
- raise
- else:
- # For external API, use regular basic auth as before
- print("Using regular basic auth for external API")
- auth = HTTPBasicAuth(gpodder_login, gpodder_token)
-
- # Try session-based authentication (for PodFetch)
- gpodder_data = None
- use_session = False
-
- try:
- # First try to login to establish a session
- login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json"
- print(f"Trying session-based authentication at {login_url}")
- login_response = session.post(login_url, auth=auth)
- login_response.raise_for_status()
- print("Session login successful")
-
- # Use the session to get subscriptions with the since parameter
- subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}"
- response = session.get(subscription_url)
- response.raise_for_status()
- gpodder_data = response.json()
- use_session = True
- print("Using session-based authentication")
-
- except Exception as e:
- logger.warning(f"Session-based authentication failed: {str(e)}. Falling back to basic auth.")
- # Fall back to standard auth if session auth fails
- try:
- subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{actual_device_name}.json?since={timestamps['last_timestamp']}"
- print(f"Trying basic authentication at {subscription_url}")
- response = requests.get(subscription_url, auth=auth)
- response.raise_for_status()
- gpodder_data = response.json()
- print("Using basic authentication")
- except Exception as e2:
- logger.error(f"Basic auth also failed: {str(e2)}")
- raise
-
- # Store timestamp for next sync if present
- if gpodder_data and "timestamp" in gpodder_data:
- update_sync_timestamp(cnx, database_type, user_id, device_id, "last_timestamp", gpodder_data["timestamp"])
- logger.info(f"Stored timestamp: {gpodder_data['timestamp']}")
-
- # Extract subscription data
- gpodder_podcasts_add = gpodder_data.get("add", [])
- gpodder_podcasts_remove = gpodder_data.get("remove", [])
-
- print(f"gPodder podcasts to add: {gpodder_podcasts_add}")
- print(f"gPodder podcasts to remove: {gpodder_podcasts_remove}")
-
- # Get local podcasts
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s'
- else:
- query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- local_podcasts = set()
- for row in cursor.fetchall():
- if isinstance(row, dict):
- local_podcasts.add(row["feedurl"]) # PostgreSQL dict case
- else:
- local_podcasts.add(row[0]) # Tuple case
-
- podcasts_to_add = set(gpodder_podcasts_add) - local_podcasts
- podcasts_to_remove = set(gpodder_podcasts_remove) & local_podcasts
-
- # Track successful additions and removals for sync
- successful_additions = set()
- successful_removals = set()
-
- # Add new podcasts with individual error handling
- print("Adding new podcasts...")
- for feed_url in podcasts_to_add:
- try:
- podcast_values = get_podcast_values(feed_url, user_id)
- feed_cutoff = 30
- return_value = add_podcast(cnx, database_type, podcast_values, user_id, feed_cutoff)
- if return_value:
- print(f"Successfully added {feed_url}")
- successful_additions.add(feed_url)
- else:
- logger.error(f"Failed to add {feed_url}")
- except Exception as e:
- logger.error(f"Error processing {feed_url}: {str(e)}")
- continue # Continue with next podcast even if this one fails
-
- # Remove podcasts with individual error handling
- print("Removing podcasts...")
- for feed_url in podcasts_to_remove:
- try:
- if database_type == "postgresql":
- query = 'SELECT PodcastName FROM "Podcasts" WHERE FeedURL = %s'
- else:
- query = "SELECT PodcastName FROM Podcasts WHERE FeedURL = %s"
-
- cursor.execute(query, (feed_url,))
- result = cursor.fetchone()
-
- if result:
- podcast_name = result[0]
- if remove_podcast(cnx, database_type, podcast_name, feed_url, user_id):
- successful_removals.add(feed_url)
- print(f"Successfully removed {feed_url}")
- else:
- logger.error(f"Failed to remove {feed_url}")
- else:
- logger.warning(f"No podcast found with URL: {feed_url}")
- except Exception as e:
- logger.error(f"Error removing {feed_url}: {str(e)}")
- continue
-
- cnx.commit()
- cursor.close()
-
- # Process episode actions using the correct device
- try:
- print(f"Authentication method: {'session' if use_session else 'basic auth'}")
- if use_session:
- print("Using SESSION authentication for episode actions")
- process_episode_actions_session(
- session,
- gpodder_url,
- gpodder_login,
- cnx,
- database_type,
- user_id,
- actual_device_name,
- device_id
- )
- else:
- print("Using BASIC authentication for episode actions")
- process_episode_actions(
- gpodder_url,
- gpodder_login,
- auth,
- cnx,
- database_type,
- user_id,
- actual_device_name,
- device_id
- )
- except Exception as e:
- logger.error(f"Error processing episode actions: {str(e)}")
-
- # Sync local episode times
- try:
- if use_session:
- sync_local_episode_times_session(
- session,
- gpodder_url,
- gpodder_login,
- cnx,
- database_type,
- user_id,
- actual_device_name
- )
- else:
- sync_local_episode_times(
- gpodder_url,
- gpodder_login,
- auth,
- cnx,
- database_type,
- user_id,
- actual_device_name
- )
- except Exception as e:
- logger.error(f"Error syncing local episode times: {str(e)}")
-
- return True
- except Exception as e:
- logger.error(f"Major error in refresh_gpodder_subscription: {str(e)}")
- return False
-
-def sync_local_episode_times_session(session, gpodder_url, gpodder_login, cnx, database_type, user_id, device_name=None, UPLOAD_BULK_SIZE=30):
- """Sync local episode times using session-based authentication"""
- from datetime import datetime
- try:
- # If no device name is provided, get the user's default device
- if not device_name:
- default_device = get_default_gpodder_device(cnx, database_type, user_id)
- if default_device:
- device_name = default_device["name"]
- else:
- print("WARNING: No devices found for user, episode actions will fail")
- return
-
- # Get local episode times
- local_episode_times = get_local_episode_times(cnx, database_type, user_id)
-
- # Skip if no episodes to sync
- if not local_episode_times:
- print("No episodes to sync")
- return
-
- # Format actions with all the required fields
- actions = []
-
- # Format timestamp as ISO string
- current_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
-
- for episode_time in local_episode_times:
- # Only include episodes with valid duration data
- if episode_time.get("episode_duration") and episode_time.get("listen_duration"):
- if not episode_time.get("podcast_url") or not episode_time.get("episode_url"):
- print(f"Skipping episode with missing URL data")
- continue
-
- # If episode is completed, set position to total duration
- position = (episode_time["episode_duration"]
- if episode_time.get("completed", False)
- else episode_time["listen_duration"])
-
- # Add all required fields including device
- action = {
- "podcast": episode_time["podcast_url"],
- "episode": episode_time["episode_url"],
- "action": "play",
- "position": int(position),
- "total": int(episode_time["episode_duration"]),
- "timestamp": current_time,
- "device": device_name,
- "started": 0 # Required by some implementations
- }
-
- # Add guid if available
- if episode_time.get("guid"):
- action["guid"] = episode_time["guid"]
-
- actions.append(action)
-
- if not actions:
- print("No valid actions to send")
- return
-
- print(f"Prepared {len(actions)} actions to send")
- print(f"First action device name: {actions[0]['device']}")
-
- # Split into chunks and process
- actions_chunks = [
- actions[i:i + UPLOAD_BULK_SIZE]
- for i in range(0, len(actions), UPLOAD_BULK_SIZE)
- ]
-
- for chunk in actions_chunks:
- try:
- response = session.post(
- f"{gpodder_url}/api/2/episodes/{gpodder_login}.json",
- json=chunk, # Send as array
- headers={"Content-Type": "application/json"}
- )
-
- if response.status_code < 300:
- print(f"Successfully synced {len(chunk)} episode actions")
- else:
- print(f"Error syncing episode actions: {response.status_code} - {response.text}")
-
- # Debug the request
- print(f"Request URL: {gpodder_url}/api/2/episodes/{gpodder_login}.json")
- print(f"Request headers: {session.headers}")
- print(f"First few actions in chunk: {chunk[:2]}")
- except Exception as e:
- print(f"Error sending actions: {str(e)}")
- continue
-
- except Exception as e:
- print(f"Error in sync_local_episode_times_session: {str(e)}")
-
-
def set_default_gpodder_device(cnx, database_type, user_id, device_id):
    """
    Mark one of the user's GPodder devices as the default.

    Clears the default flag from any previously-default device, then flags
    the requested device. The device must exist and belong to the user.

    Args:
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: User ID
        device_id: Device ID to set as default

    Returns:
        bool: True when the default was updated, False otherwise
    """
    try:
        cursor = cnx.cursor()

        # Ownership check: refuse to touch a device that is missing or
        # belongs to another user.
        if database_type == "postgresql":
            ownership_query = 'SELECT DeviceID FROM "GpodderDevices" WHERE DeviceID = %s AND UserID = %s'
        else:
            ownership_query = 'SELECT DeviceID FROM GpodderDevices WHERE DeviceID = %s AND UserID = %s'

        cursor.execute(ownership_query, (device_id, user_id))
        if not cursor.fetchone():
            print(f"Device ID {device_id} does not exist or doesn't belong to user {user_id}")
            return False

        # Two-step swap inside one transaction: drop the old default flag,
        # then raise it on the requested device.
        if database_type == "postgresql":
            clear_sql = """
                UPDATE "GpodderDevices"
                SET IsDefault = FALSE
                WHERE UserID = %s AND IsDefault = TRUE
            """
            set_sql = """
                UPDATE "GpodderDevices"
                SET IsDefault = TRUE
                WHERE DeviceID = %s
            """
        else:
            clear_sql = """
                UPDATE GpodderDevices
                SET IsDefault = FALSE
                WHERE UserID = %s AND IsDefault = TRUE
            """
            set_sql = """
                UPDATE GpodderDevices
                SET IsDefault = TRUE
                WHERE DeviceID = %s
            """

        cursor.execute(clear_sql, (user_id,))
        cursor.execute(set_sql, (device_id,))

        cnx.commit()
        print(f"Set default GPodder device {device_id} for user {user_id}")
        return True
    except Exception as e:
        print(f"Error setting default GPodder device: {e}")
        cnx.rollback()
        return False
    finally:
        cursor.close()
-
def _format_gpodder_device_row(result, is_default):
    """Normalize a GpodderDevices row (dict or tuple cursor result) into the API device dict."""
    if isinstance(result, dict):
        return {
            "id": result["deviceid"],
            "name": result["devicename"],
            "type": result["devicetype"],
            "caption": result["devicecaption"],
            "last_sync": result["lastsync"],
            "is_active": result["isactive"],
            "is_remote": False,
            "is_default": is_default
        }
    return {
        "id": result[0],
        "name": result[1],
        "type": result[2],
        "caption": result[3],
        "last_sync": result[4],
        "is_active": result[5],
        "is_remote": False,
        "is_default": is_default
    }


def get_default_gpodder_device(cnx, database_type, user_id):
    """
    Gets the user's default GPodder device.
    If no default is set, returns the oldest device.

    Args:
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: User ID

    Returns:
        dict: Device information or None if no devices exist
    """
    cursor = None
    try:
        cursor = cnx.cursor()

        # First try to get the explicitly flagged default device
        if database_type == "postgresql":
            query = """
                SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive
                FROM "GpodderDevices"
                WHERE UserID = %s AND IsDefault = TRUE
                LIMIT 1
            """
        else:
            query = """
                SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive
                FROM GpodderDevices
                WHERE UserID = %s AND IsDefault = TRUE
                LIMIT 1
            """

        cursor.execute(query, (user_id,))
        result = cursor.fetchone()

        if result:
            return _format_gpodder_device_row(result, True)

        # If no default device is set, fall back to the oldest device
        # (lowest DeviceID).
        if database_type == "postgresql":
            query = """
                SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive
                FROM "GpodderDevices"
                WHERE UserID = %s
                ORDER BY DeviceID ASC
                LIMIT 1
            """
        else:
            query = """
                SELECT DeviceID, DeviceName, DeviceType, DeviceCaption, LastSync, IsActive
                FROM GpodderDevices
                WHERE UserID = %s
                ORDER BY DeviceID ASC
                LIMIT 1
            """

        cursor.execute(query, (user_id,))
        result = cursor.fetchone()

        if result:
            return _format_gpodder_device_row(result, False)

        # No devices found
        return None
    except Exception as e:
        print(f"Error getting default GPodder device: {e}")
        return None
    finally:
        # Guard: cursor stays None if cnx.cursor() itself raised.
        if cursor is not None:
            cursor.close()
-
-
def sync_local_episode_times(gpodder_url, gpodder_login, auth, cnx, database_type, user_id, device_name="default", UPLOAD_BULK_SIZE=30):
    """Sync local episode times using basic authentication.

    Uploads local listen positions as gPodder "play" actions in chunks of
    UPLOAD_BULK_SIZE.

    Args:
        gpodder_url: Base URL of the GPodder server
        gpodder_login: GPodder username (used in the API path)
        auth: requests-compatible auth object (e.g. HTTPBasicAuth)
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: Local user whose episode times are uploaded
        device_name: Device name reported with each action
        UPLOAD_BULK_SIZE: Max actions per POST request

    Raises:
        Re-raises any error outside the per-chunk upload loop; failed
        chunks are logged and skipped.
    """
    import logging
    from datetime import datetime
    import requests

    logger = logging.getLogger(__name__)

    try:
        local_episode_times = get_local_episode_times(cnx, database_type, user_id)
        update_actions = []

        # One timestamp for the whole batch; computing it once (instead of
        # per action) keeps the upload internally consistent.
        current_time = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")

        for episode_time in local_episode_times:
            # Only include episodes with valid duration data
            if episode_time.get("episode_duration") and episode_time.get("listen_duration"):
                # FIX: skip rows missing either URL. The API requires both,
                # and an unguarded dict lookup would raise KeyError and abort
                # the entire sync (the session-based variant already skips).
                if not episode_time.get("podcast_url") or not episode_time.get("episode_url"):
                    logger.warning("Skipping episode with missing URL data")
                    continue

                # If episode is completed, report position == total duration
                position = (episode_time["episode_duration"]
                            if episode_time.get("completed", False)
                            else episode_time["listen_duration"])

                action = {
                    "podcast": episode_time["podcast_url"],
                    "episode": episode_time["episode_url"],
                    "action": "play",
                    "timestamp": current_time,
                    "position": int(position),
                    "started": 0,
                    "total": int(episode_time["episode_duration"]),
                    "device": device_name  # Use the specified device name
                }

                # Add guid if available
                if episode_time.get("guid"):
                    action["guid"] = episode_time["guid"]

                update_actions.append(action)

        # Skip if no actions to send
        if not update_actions:
            logger.info("No episode actions to upload")
            return

        # Split into chunks and process
        update_actions_chunks = [
            update_actions[i:i + UPLOAD_BULK_SIZE]
            for i in range(0, len(update_actions), UPLOAD_BULK_SIZE)
        ]

        for chunk in update_actions_chunks:
            try:
                response = requests.post(
                    f"{gpodder_url}/api/2/episodes/{gpodder_login}.json",
                    json=chunk,
                    auth=auth,
                    headers={"Accept": "application/json", "Content-Type": "application/json"}
                )
                response.raise_for_status()
                logger.info(f"Successfully synced {len(chunk)} episode actions")
            except Exception as e:
                logger.error(f"Error uploading chunk: {str(e)}")
                continue

    except Exception as e:
        logger.error(f"Error syncing local episode times: {str(e)}")
        raise
-
def process_episode_actions_session(session, gpodder_url, gpodder_login, cnx, database_type, user_id, device_name, device_id):
    """Process incoming episode actions from gPodder using session-based authentication.

    Fetches play/update_time actions newer than the stored per-device
    timestamp, records listen positions locally, and marks an episode
    completed when the reported position reaches its reported total.

    Args:
        session: pre-authenticated requests.Session
        gpodder_url: Base URL of the GPodder server
        gpodder_login: GPodder username (used in the API path)
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: Local user whose listen data is updated
        device_name: Sent as the `device` query parameter
        device_id: Local device row used for timestamp bookkeeping

    Raises:
        Re-raises errors from the HTTP fetch / JSON parse; individual
        per-action failures are logged and skipped.
    """
    logger = logging.getLogger(__name__)
    print('running episode actions')

    try:
        # Get timestamp for since parameter
        timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id)
        episodes_timestamp = timestamps["episodes_timestamp"]
        print('got timestamps')

        # Get episode actions with session and since parameter
        episode_actions_response = session.get(
            f"{gpodder_url}/api/2/episodes/{gpodder_login}.json?since={episodes_timestamp}&device={device_name}"
        )
        episode_actions_response.raise_for_status()
        episode_actions = episode_actions_response.json()
        print('got actions')

        # Store the server-provided timestamp so the next poll only fetches
        # newer actions
        if "timestamp" in episode_actions:
            update_sync_timestamp(cnx, database_type, user_id, device_id, "episodes_timestamp", episode_actions["timestamp"])
            print('stamp stored')
        # Process each action
        cursor = cnx.cursor()
        for action in episode_actions.get('actions', []):
            print('processing')
            try:
                if action["action"].lower() in ["play", "update_time"]:
                    # actions with position == -1 carry no usable position; skip
                    if "position" in action and action["position"] != -1:
                        episode_id = get_episode_id_by_url(cnx, database_type, action["episode"])
                        if episode_id:
                            # Update listen duration
                            record_listen_duration(cnx, database_type, episode_id, user_id, int(action["position"]))
                            # Check for completion (position caught up to total)
                            if ("total" in action and action["total"] > 0 and
                                action["position"] >= action["total"]):
                                if database_type == "postgresql":
                                    update_query = '''
                                        UPDATE "Episodes"
                                        SET Completed = TRUE
                                        WHERE EpisodeID = %s
                                    '''
                                else:
                                    update_query = '''
                                        UPDATE Episodes
                                        SET Completed = TRUE
                                        WHERE EpisodeID = %s
                                    '''
                                cursor.execute(update_query, (episode_id,))
                                cnx.commit()
                                print(f"Marked episode {episode_id} as completed")
            except Exception as e:
                # One bad action must not abort the rest of the batch
                logger.error(f"Error processing episode action {action}: {str(e)}")
                continue
        cursor.close()
    except Exception as e:
        logger.error(f"Error fetching episode actions with session: {str(e)}")
        raise
-
def process_episode_actions(gpodder_url, gpodder_login, auth, cnx, database_type, user_id, device_name, device_id):
    """Process incoming episode actions from gPodder using basic authentication.

    Same flow as the session-based variant: fetch actions newer than the
    stored per-device timestamp, record listen positions, and mark episodes
    completed when position reaches total.

    Args:
        gpodder_url: Base URL of the GPodder server
        gpodder_login: GPodder username (used in the API path)
        auth: requests-compatible auth object (e.g. HTTPBasicAuth)
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: Local user whose listen data is updated
        device_name: Optional device filter appended as a query parameter
        device_id: Local device row used for timestamp bookkeeping

    Raises:
        Re-raises errors from the HTTP fetch / JSON parse; individual
        per-action failures are logged and skipped.
    """
    logger = logging.getLogger(__name__)
    print('Running episode actions with basic auth')
    try:
        # Get timestamp for since parameter
        timestamps = get_sync_timestamps(cnx, database_type, user_id, device_id)
        episodes_timestamp = timestamps["episodes_timestamp"]
        print(f'Got timestamps: {episodes_timestamp}')

        # Only append the device parameter when a name is actually set
        url = f"{gpodder_url}/api/2/episodes/{gpodder_login}.json?since={episodes_timestamp}"
        if device_name:
            url += f"&device={device_name}"

        print(f"Episode actions API URL: {url}")

        # Get episode actions with basic auth
        episode_actions_response = requests.get(url, auth=auth)
        print(f"Episode actions response status: {episode_actions_response.status_code}")

        # Log the raw response for debugging
        response_text = episode_actions_response.text
        print(f"Raw response: {response_text[:200]}...")  # Log first 200 chars

        episode_actions_response.raise_for_status()

        # Parse the JSON response
        episode_actions = episode_actions_response.json()
        print(f"Response keys: {episode_actions.keys()}")

        # Store timestamp for future requests
        if "timestamp" in episode_actions:
            update_sync_timestamp(cnx, database_type, user_id, device_id, "episodes_timestamp", episode_actions["timestamp"])
            print(f'Updated timestamp to {episode_actions["timestamp"]}')

        # Check if 'actions' key exists before processing
        if 'actions' not in episode_actions:
            # FIX: print() does not do %-style formatting, so the original
            # logger-style call printed the literal "%s" plus the dict.
            print(f"No 'actions' key in response. Response structure: {episode_actions}")
            return  # Exit early if no actions to process

        # Process each action - same as in session version
        cursor = cnx.cursor()
        for action in episode_actions.get('actions', []):
            try:
                print(f"Processing action: {action}")

                if "action" not in action:
                    print(f"Action missing 'action' key: {action}")
                    continue

                if action["action"].lower() in ["play", "update_time"]:
                    if "position" in action and action["position"] != -1:
                        # Check if episode key exists
                        if "episode" not in action:
                            print(f"Action missing 'episode' key: {action}")
                            continue

                        episode_id = get_episode_id_by_url(cnx, database_type, action["episode"])

                        if not episode_id:
                            print(f"No episode found for URL: {action['episode']}")
                            continue

                        # Update listen duration
                        record_listen_duration(cnx, database_type, episode_id, user_id, int(action["position"]))
                        print(f"Updated listen duration for episode {episode_id}")

                        # Check for completion
                        if ("total" in action and action["total"] > 0 and
                            action["position"] >= action["total"]):
                            if database_type == "postgresql":
                                update_query = '''
                                    UPDATE "Episodes"
                                    SET Completed = TRUE
                                    WHERE EpisodeID = %s
                                '''
                            else:
                                update_query = '''
                                    UPDATE Episodes
                                    SET Completed = TRUE
                                    WHERE EpisodeID = %s
                                '''
                            cursor.execute(update_query, (episode_id,))
                            cnx.commit()
                            print(f"Marked episode {episode_id} as completed")
            except Exception as e:
                logger.error(f"Error processing episode action {action}: {str(e)}")
                # Continue with next action rather than breaking
                continue
        cursor.close()
    except Exception as e:
        logger.error(f"Error fetching episode actions with basic auth: {str(e)}", exc_info=True)
        raise
-
def force_full_sync_to_gpodder(database_type, cnx, user_id, gpodder_url, encrypted_gpodder_token, gpodder_login, device_id=None, device_name=None, is_remote=False):
    """Force a full sync of all local podcasts to the GPodder server.

    Pushes the user's complete local subscription list, preferring PUT
    (replace) and falling back to POST (add/remove) on failure. The internal
    API (http://localhost:8042) uses the token as-is with basic auth; an
    external server gets the token decrypted with the stored Fernet key and
    a session login is attempted first.

    Args:
        database_type: "postgresql" or "mariadb"
        cnx: Database connection
        user_id: User whose podcasts are pushed
        gpodder_url: Base URL of the GPodder server
        encrypted_gpodder_token: Token (Fernet-encrypted for external
            servers, raw for the internal API)
        gpodder_login: GPodder username
        device_id: Optional device ID; None or <= 0 falls back to the
            default device
        device_name: Optional device name; looked up from the DB when None
        is_remote: Informational flag, only logged here

    Returns:
        bool: True when any push strategy succeeded, False otherwise
    """
    from cryptography.fernet import Fernet
    from requests.auth import HTTPBasicAuth
    import requests
    import logging
    import base64  # Make sure to import base64

    print(f"Starting GPodder sync with: device_id={device_id}, device_name={device_name}, is_remote={is_remote}")

    try:
        # Check if this is the internal API
        is_internal_api = (gpodder_url == "http://localhost:8042")
        print(f"Is internal API: {is_internal_api}")

        # Use provided device_id or get/create default
        if device_id is None or device_id <= 0:  # Handle negative IDs for remote devices
            device_id = get_or_create_default_device(cnx, database_type, user_id)
            print(f"Using default device with ID: {device_id}")
        else:
            print(f"Using provided device ID: {device_id}")

        # Use provided device_name or get from database
        if device_name is None:
            cursor = cnx.cursor()
            if database_type == "postgresql":
                query = 'SELECT DeviceName FROM "GpodderDevices" WHERE DeviceID = %s'
            else:
                query = "SELECT DeviceName FROM GpodderDevices WHERE DeviceID = %s"
            cursor.execute(query, (device_id,))
            result = cursor.fetchone()
            if result:
                device_name = result[0] if isinstance(result, tuple) else result["devicename"]
                print(f"Found device name from database: {device_name}")
            else:
                # Fallback to default name if query returns nothing
                device_name = "pinepods_default"
                print(f"No device name found, using default: {device_name}")
            cursor.close()
        else:
            print(f"Using provided device name: {device_name}")

        # Handle token based on whether it's internal or external API
        gpodder_token = None
        if is_internal_api:
            # For internal API, use the token directly without decryption
            gpodder_token = encrypted_gpodder_token
            print("Using raw token for internal API")
        else:
            # For external API, decrypt the token
            try:
                # Fetch encryption key
                encryption_key = get_encryption_key(cnx, database_type)
                encryption_key_bytes = base64.b64decode(encryption_key)
                cipher_suite = Fernet(encryption_key_bytes)

                # Decrypt the token
                if encrypted_gpodder_token is not None:
                    decrypted_token_bytes = cipher_suite.decrypt(encrypted_gpodder_token.encode())
                    gpodder_token = decrypted_token_bytes.decode()
                    print("Successfully decrypted token for external API")
                else:
                    gpodder_token = None
                    print("Warning: No GPodder token provided")
            except Exception as e:
                print(f"Error decrypting token: {str(e)}")
                # Use the token as-is if decryption fails
                gpodder_token = encrypted_gpodder_token
                print("Using encrypted token as fallback due to decryption error")

        # Create auth
        auth = HTTPBasicAuth(gpodder_login, gpodder_token)

        # Get all local podcasts
        cursor = cnx.cursor()
        if database_type == "postgresql":
            query = 'SELECT FeedURL FROM "Podcasts" WHERE UserID = %s'
        else:
            query = "SELECT FeedURL FROM Podcasts WHERE UserID = %s"
        cursor.execute(query, (user_id,))

        # Normalize rows (dict or tuple cursors) into a flat URL list
        local_podcasts = []
        for row in cursor.fetchall():
            if isinstance(row, dict):
                local_podcasts.append(row["feedurl"])
            else:
                local_podcasts.append(row[0])

        print(f"Found {len(local_podcasts)} local podcasts to sync")

        # For internal API, skip session-based login and go straight to basic auth
        if is_internal_api:
            print("Internal API detected - skipping session login and using basic auth directly")
            subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"

            # Try PUT request first (standard method)
            try:
                print(f"Sending PUT request with basic auth to: {subscription_url}")

                response = requests.put(
                    subscription_url,
                    json=local_podcasts,
                    auth=auth,
                    headers={"Content-Type": "application/json"}
                )

                print(f"PUT response status: {response.status_code}")
                response.raise_for_status()
                print("Successfully pushed all podcasts to internal GPodder API")
                return True
            except Exception as e:
                print(f"PUT request failed: {str(e)}")

                # Fall back to POST with update format
                try:
                    print("Trying POST with update format...")
                    payload = {
                        "add": local_podcasts,
                        "remove": []
                    }

                    response = requests.post(
                        subscription_url,
                        json=payload,
                        auth=auth,
                        headers={"Content-Type": "application/json"}
                    )

                    response.raise_for_status()
                    print("Successfully updated podcasts using POST method")
                    return True
                except Exception as e2:
                    print(f"POST request failed: {str(e2)}")
                    return False
        else:
            # For external API, try session login first
            try:
                # Try to login first to establish a session
                session = requests.Session()
                login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json"
                print(f"Logging in to external GPodder at: {login_url}")
                login_response = session.post(login_url, auth=auth)
                login_response.raise_for_status()
                print("Session login successful for full sync")

                # Use PUT request to update subscriptions
                subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"
                print(f"Sending PUT request to: {subscription_url}")

                # Debug the payload
                print(f"Sending payload: {local_podcasts[:3]}... (showing first 3 of {len(local_podcasts)})")

                response = session.put(
                    subscription_url,
                    json=local_podcasts,
                    headers={"Content-Type": "application/json"}
                )

                # Check response
                print(f"PUT response status: {response.status_code}")
                print(f"PUT response text: {response.text[:200]}...")  # Show first 200 chars

                response.raise_for_status()
                print(f"Successfully pushed all podcasts to GPodder")
                return True

            except Exception as e:
                print(f"Session-based sync failed: {str(e)}. Falling back to basic auth.")
                try:
                    # Try a different method - POST with the update API
                    try:
                        print("Trying POST to subscriptions-update API...")
                        update_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"
                        payload = {
                            "add": local_podcasts,
                            "remove": []
                        }
                        response = requests.post(
                            update_url,
                            json=payload,
                            auth=auth,
                            headers={"Content-Type": "application/json"}
                        )
                        response.raise_for_status()
                        print(f"Successfully updated podcasts using POST method")
                        return True
                    except Exception as e3:
                        print(f"Failed with POST method: {str(e3)}")

                    # Fall back to basic auth with PUT
                    print("Falling back to basic auth with PUT...")
                    subscription_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"
                    response = requests.put(
                        subscription_url,
                        json=local_podcasts,
                        auth=auth,
                        headers={"Content-Type": "application/json"}
                    )

                    # Check response
                    print(f"Basic auth PUT response status: {response.status_code}")
                    print(f"Basic auth PUT response text: {response.text[:200]}...")  # Show first 200 chars

                    response.raise_for_status()
                    print(f"Successfully pushed all podcasts to GPodder using basic auth")
                    return True
                except Exception as e2:
                    print(f"Failed to push podcasts with basic auth: {str(e2)}")
                    return False

    except Exception as e:
        print(f"Error in force_full_sync_to_gpodder: {str(e)}")
        return False
-
def sync_subscription_change_gpodder_with_device(gpodder_url, gpodder_login, auth, device_name, add=None, remove=None):
    """Sync subscription changes using device name"""
    import requests
    import logging

    logger = logging.getLogger(__name__)

    # Normalize the optional change lists so the payload always carries
    # both keys, even when no changes of that kind were requested.
    payload = {
        "add": add if add else [],
        "remove": remove if remove else [],
    }

    endpoint = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"
    try:
        response = requests.post(endpoint, json=payload, auth=auth)
        response.raise_for_status()
        logger.info(f"Subscription changes synced with gPodder: {response.text}")
        return response.json()
    except Exception as e:
        logger.error(f"Error syncing subscription changes: {str(e)}")
        return None
-
def sync_subscription_change_gpodder_session_with_device(session, gpodder_url, gpodder_login, device_name, add=None, remove=None):
    """Sync subscription changes using session-based authentication with device name"""
    import logging

    logger = logging.getLogger(__name__)

    # Build the gPodder change payload; missing lists become empty lists.
    payload = {
        "add": add if add else [],
        "remove": remove if remove else [],
    }

    endpoint = f"{gpodder_url}/api/2/subscriptions/{gpodder_login}/{device_name}.json"
    try:
        response = session.post(
            endpoint,
            json=payload,
            headers={"Content-Type": "application/json"},
        )
        response.raise_for_status()
        logger.info(f"Subscription changes synced with gPodder using session: {response.text}")
        return response.json()
    except Exception as e:
        logger.error(f"Error syncing subscription changes with session: {str(e)}")
        return None
-
def subscribe_to_person(cnx, database_type, user_id: int, person_id: int, person_name: str, person_img: str, podcast_id: int) -> tuple[bool, int]:
    """Subscribe a user to a podcast host/guest ("person").

    Matches an existing People row by PeopleDBID (or by Name when
    person_id == 0), appends podcast_id to its AssociatedPodcasts CSV,
    or inserts a new row when none exists.

    Args:
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: Subscribing user
        person_id: External people-DB ID (0 when unknown; match by name)
        person_name: Person's display name
        person_img: Optional image URL (COALESCEd on update)
        podcast_id: Podcast to associate with the person

    Returns:
        (True, person_row_id) on success, (False, 0) on failure.
    """
    cursor = cnx.cursor()
    try:
        print(f"Starting subscribe_to_person with: user_id={user_id}, person_id={person_id}, person_name={person_name}, podcast_id={podcast_id}")

        if database_type == "postgresql":
            # Check if a person with the same PeopleDBID (if not 0) or Name (if PeopleDBID is 0) exists
            if person_id != 0:
                query = """
                    SELECT PersonID, AssociatedPodcasts FROM "People"
                    WHERE UserID = %s AND PeopleDBID = %s
                """
                cursor.execute(query, (user_id, person_id))
            else:
                query = """
                    SELECT PersonID, AssociatedPodcasts FROM "People"
                    WHERE UserID = %s AND Name = %s AND PeopleDBID = 0
                """
                cursor.execute(query, (user_id, person_name))

            existing_person = cursor.fetchone()
            print(f"Query result: {existing_person}")

            if existing_person:
                print("Found existing person, updating...")
                # Person exists, update AssociatedPodcasts and possibly update image/description
                person_id, associated_podcasts = existing_person
                podcast_list = associated_podcasts.split(',') if associated_podcasts else []
                if str(podcast_id) not in podcast_list:
                    podcast_list.append(str(podcast_id))
                new_associated_podcasts = ','.join(podcast_list)
                update_query = """
                    UPDATE "People"
                    SET AssociatedPodcasts = %s,
                        PersonImg = COALESCE(%s, PersonImg)
                    WHERE PersonID = %s
                """
                print(f"Executing update query: {update_query} with params: ({new_associated_podcasts}, {person_img}, {person_id})")
                cursor.execute(update_query, (new_associated_podcasts, person_img, person_id))
                # BUG FIX: the update was never committed on this path (the
                # MariaDB branch commits), so the change could be rolled back
                # when the connection closed.
                cnx.commit()
                return True, person_id
            else:
                print("No existing person found, inserting new record...")
                # Person doesn't exist, insert new record with image and description
                insert_query = """
                    INSERT INTO "People"
                    (UserID, PeopleDBID, Name, PersonImg, AssociatedPodcasts)
                    VALUES (%s, %s, %s, %s, %s)
                    RETURNING PersonID;
                """
                print(f"Executing insert query: {insert_query} with params: ({user_id}, {person_id}, {person_name}, {person_img}, {str(podcast_id)})")
                cursor.execute(insert_query, (user_id, person_id, person_name, person_img, str(podcast_id)))
                result = cursor.fetchone()
                print(f"Insert result: {result}")
                if result is not None:
                    # Handle both tuple and dict return types
                    if isinstance(result, dict):
                        new_person_id = result['personid']
                    else:
                        new_person_id = result[0]
                    print(f"Insert successful, new PersonID: {new_person_id}")
                    cnx.commit()
                    return True, new_person_id
                else:
                    print("Insert did not return a PersonID")
                    cnx.rollback()
                    return False, 0

        else:  # MariaDB
            # Check if person exists
            if person_id != 0:
                query = """
                    SELECT PersonID, AssociatedPodcasts FROM People
                    WHERE UserID = %s AND PeopleDBID = %s
                """
                cursor.execute(query, (user_id, person_id))
            else:
                query = """
                    SELECT PersonID, AssociatedPodcasts FROM People
                    WHERE UserID = %s AND Name = %s AND PeopleDBID = 0
                """
                cursor.execute(query, (user_id, person_name))

            existing_person = cursor.fetchone()
            print(f"Query result: {existing_person}")

            if existing_person:
                print("Found existing person, updating...")
                # Person exists, update AssociatedPodcasts
                person_id = existing_person[0]  # MariaDB returns tuple
                associated_podcasts = existing_person[1]
                podcast_list = associated_podcasts.split(',') if associated_podcasts else []

                if str(podcast_id) not in podcast_list:
                    podcast_list.append(str(podcast_id))
                new_associated_podcasts = ','.join(podcast_list)

                update_query = """
                    UPDATE People
                    SET AssociatedPodcasts = %s,
                        PersonImg = COALESCE(%s, PersonImg)
                    WHERE PersonID = %s
                """
                print(f"Executing update query: {update_query} with params: ({new_associated_podcasts}, {person_img}, {person_id})")
                cursor.execute(update_query, (new_associated_podcasts, person_img, person_id))
                cnx.commit()
                return True, person_id
            else:
                print("No existing person found, inserting new record...")
                # Person doesn't exist, insert new record
                insert_query = """
                    INSERT INTO People
                    (UserID, PeopleDBID, Name, PersonImg, AssociatedPodcasts)
                    VALUES (%s, %s, %s, %s, %s)
                """
                print(f"Executing insert query: {insert_query} with params: ({user_id}, {person_id}, {person_name}, {person_img}, {str(podcast_id)})")
                cursor.execute(insert_query, (user_id, person_id, person_name, person_img, str(podcast_id)))
                cnx.commit()

                # Get the inserted ID
                new_person_id = cursor.lastrowid
                print(f"Insert successful, new PersonID: {new_person_id}")

                if new_person_id:
                    return True, new_person_id
                else:
                    print("Insert did not return a PersonID")
                    cnx.rollback()
                    return False, 0

    except Exception as e:
        print(f"Detailed error in subscribe_to_person: {str(e)}\nType: {type(e)}")
        import traceback
        print(f"Traceback: {traceback.format_exc()}")
        cnx.rollback()
        return False, 0
    finally:
        cursor.close()
-
def unsubscribe_from_person(cnx, database_type, user_id: int, person_id: int, person_name: str) -> bool:
    """Unsubscribe a user from a person and clean up orphaned episodes.

    Deletes the user's People row; when this was the last row referencing
    the PersonID, the person's cached PeopleEpisodes are deleted too.

    Args:
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: Unsubscribing user
        person_id: People.PersonID to remove
        person_name: Only used for log output

    Returns:
        True on success, False when the row is missing or an error occurs.
    """
    cursor = cnx.cursor()
    try:
        print(f"Attempting to unsubscribe user {user_id} from person {person_name} (ID: {person_id})")
        if database_type == "postgresql":
            # Use PersonID instead of PeopleDBID for looking up the record to delete
            person_query = 'SELECT PersonID FROM "People" WHERE UserID = %s AND PersonID = %s'
            print(f"Searching for person with query: {person_query} and params: {user_id}, {person_id}")
            cursor.execute(person_query, (user_id, person_id))
        else:
            person_query = "SELECT PersonID FROM People WHERE UserID = %s AND PersonID = %s"
            cursor.execute(person_query, (user_id, person_id))

        result = cursor.fetchone()
        print(f"Query result: {result}")
        if not result:
            print(f"No person found for user {user_id} with ID {person_id}")
            return False

        # Handle both tuple and dict return types
        if isinstance(result, dict):
            person_db_id = result['personid']
        else:
            person_db_id = result[0]
        print(f"Found PersonID: {person_db_id}")

        if database_type == "postgresql":
            check_query = 'SELECT COUNT(*) FROM "People" WHERE PersonID = %s'
            delete_query = 'DELETE FROM "People" WHERE PersonID = %s'
        else:
            check_query = "SELECT COUNT(*) FROM People WHERE PersonID = %s"
            delete_query = "DELETE FROM People WHERE PersonID = %s"

        # Check subscriber count for both database types.
        # FIX: use person_db_id consistently (same value, but the mixed use of
        # person_id/person_db_id was confusing) and handle dict-row cursors,
        # which would raise KeyError on [0] — the rest of this function
        # already handles both row shapes.
        cursor.execute(check_query, (person_db_id,))
        count_row = cursor.fetchone()
        if isinstance(count_row, dict):
            subscriber_count = next(iter(count_row.values()))
        else:
            subscriber_count = count_row[0]

        # Only delete episodes if this is the last subscriber
        if subscriber_count <= 1:
            if database_type == "postgresql":
                episodes_query = 'DELETE FROM "PeopleEpisodes" WHERE PersonID = %s'
            else:
                episodes_query = "DELETE FROM PeopleEpisodes WHERE PersonID = %s"

            print(f"Deleting episodes for PersonID {person_db_id}")
            cursor.execute(episodes_query, (person_db_id,))
            episode_count = cursor.rowcount
            print(f"Deleted {episode_count} episodes")

        # Always delete the person record for this user
        print(f"Deleting person record for PersonID {person_db_id}")
        cursor.execute(delete_query, (person_db_id,))
        person_count = cursor.rowcount
        print(f"Deleted {person_count} person records")

        cnx.commit()
        return True

    except Exception as e:
        print(f"Error unsubscribing from person: {str(e)}")
        print(f"Error type: {type(e)}")
        if hasattr(e, '__cause__'):
            print(f"Cause: {e.__cause__}")
        cnx.rollback()
        return False
    finally:
        cursor.close()
-
def get_person_subscriptions(cnx, database_type, user_id: int) -> List[dict]:
    """Return all People rows a user is subscribed to, normalized to
    lowercase keys.

    Args:
        cnx: Database connection
        database_type: "postgresql" or "mariadb"
        user_id: User whose subscriptions are listed

    Returns:
        List of dicts with keys personid, userid, name, image, peopledbid,
        associatedpodcasts; empty list on error.
    """
    # FIX: initialize before the try so the finally-close cannot raise
    # NameError (masking the real error) when cnx.cursor() itself fails.
    cursor = None
    try:
        if database_type == "postgresql":
            cursor = cnx.cursor(row_factory=dict_row)
            query = 'SELECT * FROM "People" WHERE UserID = %s'
        else:  # MySQL or MariaDB
            cursor = cnx.cursor(dictionary=True)
            query = "SELECT * FROM People WHERE UserID = %s"

        cursor.execute(query, (user_id,))
        result = cursor.fetchall()
        print(f"Raw database result: {result}")

        formatted_result = []
        for row in result:
            if database_type == "postgresql":
                # PostgreSQL returns lowercase keys
                formatted_row = {
                    'personid': int(row['personid']),
                    'userid': int(row['userid']),
                    'name': row['name'],
                    'image': row['personimg'],
                    'peopledbid': int(row['peopledbid']) if row['peopledbid'] is not None else None,
                    'associatedpodcasts': row['associatedpodcasts'],
                }
            else:
                # MariaDB returns uppercase keys
                formatted_row = {
                    'personid': int(row['PersonID']),
                    'userid': int(row['UserID']),
                    'name': row['Name'],
                    'image': row['PersonImg'],
                    'peopledbid': int(row['PeopleDBID']) if row['PeopleDBID'] is not None else None,
                    'associatedpodcasts': row['AssociatedPodcasts'],
                }
            formatted_result.append(formatted_row)

        return formatted_result

    except Exception as e:
        print(f"Error getting person subscriptions: {e}")
        import traceback
        print(f"Traceback: {traceback.format_exc()}")
        return []
    finally:
        if cursor is not None:
            cursor.close()
-
-
def backup_user(database_type, cnx, user_id):
    """Export a user's podcast subscriptions as OPML text.

    NOTE(review): the string literals below contain no OPML/XML tags and the
    f-strings have no placeholders (the loop variable `podcast` is never
    used), so as written this returns only whitespace and header text — the
    `<opml>`/`<outline>` markup appears to have been stripped at some point.
    Verify against VCS history before relying on this output.
    """
    if database_type == "postgresql":
        cursor = cnx.cursor(row_factory=psycopg.rows.dict_row)
        query_fetch_podcasts = 'SELECT PodcastName, FeedURL FROM "Podcasts" WHERE UserID = %s'
    else:
        cursor = cnx.cursor(dictionary=True)
        query_fetch_podcasts = "SELECT PodcastName, FeedURL FROM Podcasts WHERE UserID = %s"

    cursor.execute(query_fetch_podcasts, (user_id,))
    podcasts = cursor.fetchall()
    cursor.close()

    # Document header (markup apparently lost — see NOTE above)
    opml_content = '\n\n \n Podcast Subscriptions\n \n \n'

    # One line per subscribed podcast; f-strings currently reference nothing
    if database_type == "postgresql":
        for podcast in podcasts:
            opml_content += f' \n'
    else:
        for podcast in podcasts:
            opml_content += f' \n'

    opml_content += ' \n'

    return opml_content
-
-
-
def backup_server(database_type, cnx, database_pass):
    """Dump the entire Pinepods database to SQL text via pg_dump/mysqldump.

    Args:
        database_type: "postgresql" or "mariadb"/"mysql"
        cnx: Database connection (unused here; kept for interface parity)
        database_pass: Password for the database user

    Returns:
        str: The decoded SQL dump.

    Raises:
        Exception: when the dump tool exits non-zero.
    """
    # Get database name from environment variable
    db_name = os.environ.get("DB_NAME", "pinepods_database")  # Default to pinepods_database if not set
    db_host = os.environ.get("DB_HOST", "db")
    db_port = os.environ.get("DB_PORT", "5432" if database_type == "postgresql" else "3306")
    db_user = os.environ.get("DB_USER", "postgres" if database_type == "postgresql" else "root")

    # SECURITY FIX: the previous version printed database_pass to stdout,
    # leaking the credential into container logs. Never log secrets.
    if database_type == "postgresql":
        # pg_dump reads the password from PGPASSWORD; -w forbids a prompt
        os.environ['PGPASSWORD'] = database_pass
        cmd = [
            "pg_dump",
            "-h", db_host,
            "-p", db_port,
            "-U", db_user,
            "-d", db_name,
            "-w"
        ]
    else:  # Assuming MySQL or MariaDB
        # SECURITY: --password on argv is visible in the process list;
        # consider MYSQL_PWD or a defaults file instead.
        cmd = [
            "mysqldump",
            "-h", db_host,
            "-P", db_port,
            "-u", db_user,
            "--password=" + database_pass,
            db_name
        ]
    try:
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = process.communicate()
        print("STDOUT:", stdout.decode())
        print("STDERR:", stderr.decode())
        if process.returncode != 0:
            # Handle error
            raise Exception(f"Backup failed with error: {stderr.decode()}")
        return stdout.decode()
    finally:
        # Don't leave the password in this process's environment
        if database_type == "postgresql":
            os.environ.pop('PGPASSWORD', None)
-
-
-def restore_server(cnx, database_pass, file_content):
- import tempfile
-
- with tempfile.NamedTemporaryFile(mode='wb', delete=True) as tempf:
- tempf.write(file_content)
- tempf.flush()
-
- cmd = [
- "mysql",
- "-h", os.environ.get("DB_HOST", "db"),
- "-P", os.environ.get("DB_PORT", "3306"),
- "-u", os.environ.get("DB_USER", "root"),
- f"-p{database_pass}",
- os.environ.get("DB_NAME", "pinepods_database")
- ]
-
- process = subprocess.Popen(
- cmd,
- stdin=open(tempf.name, 'rb'),
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE
- )
-
- stdout, stderr = process.communicate()
- if process.returncode != 0:
- raise Exception(f"Restoration failed with error: {stderr.decode()}")
-
- return "Restoration completed successfully!"
-
-
-def get_video_date(video_id):
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)
- """Get upload date for a single video"""
- url = f"https://www.youtube.com/watch?v={video_id}"
- headers = {
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
- }
-
- try:
- # Add a small random delay to avoid rate limiting
- time.sleep(random.uniform(0.5, 1.5))
-
- response = requests.get(url, headers=headers)
- response.raise_for_status()
-
- # Look for uploadDate in page content
- date_pattern = r'"uploadDate":"([^"]+)"'
- date_match = re.search(date_pattern, response.text)
-
- if date_match:
- date_str = date_match.group(1)
- # Convert ISO format to datetime
- upload_date = datetime.datetime.fromisoformat(date_str.replace('Z', '+00:00'))
- return upload_date
- return None
-
- except Exception as e:
- logger.error(f"Error fetching date for video {video_id}: {e}")
- return None
-
-def check_and_send_notification(cnx, database_type, podcast_id, episode_title):
- cursor = cnx.cursor()
- try:
- # First check if notifications are enabled for this podcast
- if database_type == "postgresql":
- query = """
- SELECT p.NotificationsEnabled, p.UserID, p.PodcastName,
- uns.Platform, uns.Enabled, uns.NtfyTopic, uns.NtfyServerUrl,
- uns.GotifyUrl, uns.GotifyToken
- FROM "Podcasts" p
- JOIN "UserNotificationSettings" uns ON p.UserID = uns.UserID
- WHERE p.PodcastID = %s AND p.NotificationsEnabled = true AND uns.Enabled = true
- """
- else:
- query = """
- SELECT p.NotificationsEnabled, p.UserID, p.PodcastName,
- uns.Platform, uns.Enabled, uns.NtfyTopic, uns.NtfyServerUrl,
- uns.GotifyUrl, uns.GotifyToken
- FROM Podcasts p
- JOIN UserNotificationSettings uns ON p.UserID = uns.UserID
- WHERE p.PodcastID = %s AND p.NotificationsEnabled = 1 AND uns.Enabled = 1
- """
- cursor.execute(query, (podcast_id,))
- results = cursor.fetchall() # Get all enabled notification settings
- if not results:
- return False
-
- success = False # Track if at least one notification was sent
-
- for result in results:
- try:
- if isinstance(result, dict):
- platform = result['platform'] if 'platform' in result else result['Platform']
- podcast_name = result['podcastname'] if 'podcastname' in result else result['PodcastName']
-
- if platform == 'ntfy':
- # Try both casings for each field
- ntfy_topic = result.get('ntfytopic') or result.get('NtfyTopic')
- ntfy_server = result.get('ntfyserverurl') or result.get('NtfyServerUrl')
-
- if ntfy_topic and ntfy_server:
- if send_ntfy_notification(
- topic=ntfy_topic,
- server_url=ntfy_server,
- title=f"New Episode: {podcast_name}",
- message=f"New episode published: {episode_title}"
- ):
- success = True
-
- elif platform == 'gotify':
- gotify_url = result.get('gotifyurl') or result.get('GotifyUrl')
- gotify_token = result.get('gotifytoken') or result.get('GotifyToken')
-
- if gotify_url and gotify_token:
- if send_gotify_notification(
- server_url=gotify_url,
- token=gotify_token,
- title=f"New Episode: {podcast_name}",
- message=f"New episode published: {episode_title}"
- ):
- success = True
- else:
- platform = result[3]
- podcast_name = result[2]
- if platform == 'ntfy':
- if send_ntfy_notification(
- topic=result[5],
- server_url=result[6],
- title=f"New Episode: {podcast_name}",
- message=f"New episode published: {episode_title}"
- ):
- success = True
- elif platform == 'gotify':
- if send_gotify_notification(
- server_url=result[7],
- token=result[8],
- title=f"New Episode: {podcast_name}",
- message=f"New episode published: {episode_title}"
- ):
- success = True
- except Exception as e:
- logging.error(f"Error sending {platform} notification: {e}")
- # Continue trying other platforms even if one fails
- continue
-
- return success
-
- except Exception as e:
- logging.error(f"Error checking/sending notifications: {e}")
- return False
- finally:
- cursor.close()
-
-def toggle_podcast_notifications(cnx, database_type, podcast_id, user_id, enabled):
- cursor = cnx.cursor()
- try:
- # First verify the user owns this podcast
- if database_type == "postgresql":
- check_query = """
- SELECT 1 FROM "Podcasts"
- WHERE PodcastID = %s AND UserID = %s
- """
- else:
- check_query = """
- SELECT 1 FROM Podcasts
- WHERE PodcastID = %s AND UserID = %s
- """
-
- cursor.execute(check_query, (podcast_id, user_id))
- if not cursor.fetchone():
- logging.warning(f"User {user_id} attempted to modify notifications for podcast {podcast_id} they don't own")
- return False
-
- # Update the notification setting
- if database_type == "postgresql":
- update_query = """
- UPDATE "Podcasts"
- SET NotificationsEnabled = %s
- WHERE PodcastID = %s AND UserID = %s
- """
- else:
- update_query = """
- UPDATE Podcasts
- SET NotificationsEnabled = %s
- WHERE PodcastID = %s AND UserID = %s
- """
-
- cursor.execute(update_query, (enabled, podcast_id, user_id))
- cnx.commit()
- return True
-
- except Exception as e:
- logging.error(f"Error toggling podcast notifications: {e}")
- cnx.rollback()
- return False
- finally:
- cursor.close()
-
-def get_podcast_notification_status(cnx, database_type, podcast_id, user_id):
- cursor = cnx.cursor()
- try:
- # Query the notification status
- if database_type == "postgresql":
- query = """
- SELECT NotificationsEnabled
- FROM "Podcasts"
- WHERE PodcastID = %s AND UserID = %s
- """
- else:
- query = """
- SELECT NotificationsEnabled
- FROM Podcasts
- WHERE PodcastID = %s AND UserID = %s
- """
- cursor.execute(query, (podcast_id, user_id))
- result = cursor.fetchone()
- if result:
- if isinstance(result, dict): # PostgreSQL with RealDictCursor
- # Try all possible case variations
- for key in ['NotificationsEnabled', 'notificationsenabled']:
- if key in result:
- return bool(result[key])
- else: # MySQL or regular PostgreSQL cursor
- return bool(result[0])
- return False # Default to False if no result found
- except Exception as e:
- logging.error(f"Error getting podcast notification status: {e}")
- logging.error(f"Result content: {result}") # Add this for debugging
- return False
- finally:
- cursor.close()
-
-# Functions for OIDC
-
-def get_oidc_provider(cnx, database_type, client_id):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = """
- SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole
- FROM "OIDCProviders"
- WHERE ClientID = %s AND Enabled = true
- """
- else:
- query = """
- SELECT ProviderID, ClientID, ClientSecret, TokenURL, UserInfoURL, NameClaim, EmailClaim, UsernameClaim, RolesClaim, UserRole, AdminRole
- FROM OIDCProviders
- WHERE ClientID = %s AND Enabled = true
- """
- cursor.execute(query, (client_id,))
- result = cursor.fetchone()
- if result:
- if isinstance(result, dict):
- return (
- result['providerid'],
- result['clientid'],
- result['clientsecret'],
- result['tokenurl'],
- result['userinfourl'],
- result['nameclaim'],
- result['emailclaim'],
- result['usernameclaim'],
- result['rolesclaim'],
- result['userrole'],
- result['adminrole']
- )
- return result
- return None
- finally:
- cursor.close()
-
-def get_user_by_email(cnx, database_type, email):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = """
- SELECT UserID, Email, Username, Fullname, IsAdmin
- FROM "Users"
- WHERE Email = %s
- """
- else:
- query = """
- SELECT UserID, Email, Username, Fullname, IsAdmin
- FROM Users
- WHERE Email = %s
- """
- cursor.execute(query, (email,))
- result = cursor.fetchone()
- if result:
- if isinstance(result, dict):
- return (
- result['userid'],
- result['email'],
- result['username'],
- result['fullname'],
- result['isadmin']
- )
- return result
- return None
- finally:
- cursor.close()
-
-def create_oidc_user(cnx, database_type, email, fullname, username):
- cursor = cnx.cursor()
- try:
- print(f"Starting create_oidc_user for email: {email}, fullname: {fullname}, username: {username}")
-
- # Create a random salt using base64 (which is what Argon2 expects)
- salt = base64.b64encode(secrets.token_bytes(16)).decode('utf-8')
- # Create an impossible-to-match hash that's clearly marked as OIDC
- # Using proper Argon2id format but with an impossible hash
- hashed_password = f"$argon2id$v=19$m=65536,t=3,p=4${salt}${'X' * 43}_OIDC_ACCOUNT_NO_PASSWORD"
-
- print(f"Inserting new user with username: {username}, email: {email}")
- # Insert user
- if database_type == "postgresql":
- query = """
- INSERT INTO "Users"
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, false)
- RETURNING UserID
- """
- else:
- query = """
- INSERT INTO Users
- (Fullname, Username, Email, Hashed_PW, IsAdmin)
- VALUES (%s, %s, %s, %s, 0)
- """
- cursor.execute(query, (fullname, username, email, hashed_password))
-
- # Get user ID
- if database_type == "postgresql":
- result = cursor.fetchone()
- print(f"PostgreSQL INSERT result: {result}, type: {type(result)}")
-
- if result is None:
- print("ERROR: No result returned from INSERT RETURNING")
- raise Exception("No user ID returned from database after insertion")
-
- # Handle different result types
- if isinstance(result, tuple):
- print(f"Result is tuple: {result}")
- user_id = result[0]
- elif isinstance(result, dict):
- print(f"Result is dict: {result}")
- # Note: PostgreSQL column names are lowercase by default
- user_id = result.get('userid')
- if user_id is None:
- # Try other possible key variations
- user_id = result.get('UserID') or result.get('userID') or result.get('user_id')
- else:
- print(f"Unexpected result type: {type(result)}, value: {result}")
- # Try to extract user_id safely
- try:
- # Try accessing as a number
- user_id = int(result)
- except (TypeError, ValueError):
- # If that fails, convert to string and raise exception
- result_str = str(result)
- print(f"Result as string: {result_str}")
- raise Exception(f"Unable to extract user_id from result: {result_str}")
- else:
- user_id = cursor.lastrowid
- print(f"MySQL lastrowid: {user_id}")
-
- print(f"Extracted user_id: {user_id}, type: {type(user_id)}")
-
- if not user_id:
- print("ERROR: user_id is empty or zero")
- raise Exception("Invalid user_id after user creation")
-
- # Add default user settings
- print(f"Inserting default user settings for user_id: {user_id}")
- settings_query = """
- INSERT INTO "UserSettings"
- (UserID, Theme)
- VALUES (%s, %s)
- """ if database_type == "postgresql" else """
- INSERT INTO UserSettings
- (UserID, Theme)
- VALUES (%s, %s)
- """
- cursor.execute(settings_query, (user_id, 'Nordic'))
-
- # Add default user stats
- print(f"Inserting default user stats for user_id: {user_id}")
- stats_query = """
- INSERT INTO "UserStats"
- (UserID)
- VALUES (%s)
- """ if database_type == "postgresql" else """
- INSERT INTO UserStats
- (UserID)
- VALUES (%s)
- """
- cursor.execute(stats_query, (user_id,))
-
- print(f"Committing transaction")
- cnx.commit()
- print(f"User creation complete, returning user_id: {user_id}")
- return user_id
- except Exception as e:
- print(f"Error in create_oidc_user: {str(e)}")
- import traceback
- print(f"Traceback: {traceback.format_exc()}")
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
-def get_user_startpage(cnx, database_type, user_id):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = """
- SELECT StartPage
- FROM "UserSettings"
- WHERE UserID = %s
- """
- else:
- query = """
- SELECT StartPage
- FROM UserSettings
- WHERE UserID = %s
- """
-
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
-
- # Return 'home' as default if no setting is found
- if result:
- return result[0] if isinstance(result, tuple) else result['startpage']
- return 'home'
-
- except Exception as e:
- raise
- finally:
- cursor.close()
-
-def set_user_startpage(cnx, database_type, user_id, startpage):
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = """
- UPDATE "UserSettings"
- SET StartPage = %s
- WHERE UserID = %s
- """
- else:
- query = """
- UPDATE UserSettings
- SET StartPage = %s
- WHERE UserID = %s
- """
-
- cursor.execute(query, (startpage, user_id))
- cnx.commit()
- return True
-
- except Exception as e:
- cnx.rollback()
- raise
- finally:
- cursor.close()
-
-
-def convert_booleans(data):
- boolean_fields = ['completed', 'saved', 'queued', 'downloaded', 'is_youtube', 'explicit', 'is_system_playlist', 'include_unplayed', 'include_partially_played', 'include_played']
-
- if isinstance(data, dict):
- for key, value in data.items():
- if key in boolean_fields and value is not None:
- # Convert 0/1 to False/True for known boolean fields
- data[key] = bool(value)
- elif isinstance(value, (dict, list)):
- # Recursively process nested dictionaries and lists
- data[key] = convert_booleans(value)
- elif isinstance(data, list):
- # Process each item in the list
- for i, item in enumerate(data):
- data[i] = convert_booleans(item)
-
- return data
-
-def get_home_overview(database_type, cnx, user_id):
- if database_type == "postgresql":
- cnx.row_factory = dict_row
- cursor = cnx.cursor()
- else:
- cursor = cnx.cursor(dictionary=True)
-
- home_data = {
- "recent_episodes": [],
- "in_progress_episodes": [],
- "top_podcasts": [],
- "saved_count": 0,
- "downloaded_count": 0,
- "queue_count": 0
- }
-
- # Recent Episodes query with is_youtube field
- if database_type == "postgresql":
- recent_query = """
- SELECT
- "Episodes".EpisodeID,
- "Episodes".EpisodeTitle,
- "Episodes".EpisodePubDate,
- "Episodes".EpisodeDescription,
- "Episodes".EpisodeArtwork,
- "Episodes".EpisodeURL,
- "Episodes".EpisodeDuration,
- "Episodes".Completed,
- "Podcasts".PodcastName,
- "Podcasts".PodcastID,
- "Podcasts".IsYouTubeChannel as is_youtube,
- "UserEpisodeHistory".ListenDuration,
- CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded
- FROM "Episodes"
- INNER JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "UserEpisodeHistory" ON
- "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
- AND "UserEpisodeHistory".UserID = %s
- LEFT JOIN "SavedEpisodes" ON
- "Episodes".EpisodeID = "SavedEpisodes".EpisodeID
- AND "SavedEpisodes".UserID = %s
- LEFT JOIN "EpisodeQueue" ON
- "Episodes".EpisodeID = "EpisodeQueue".EpisodeID
- AND "EpisodeQueue".UserID = %s
- LEFT JOIN "DownloadedEpisodes" ON
- "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID
- AND "DownloadedEpisodes".UserID = %s
- WHERE "Podcasts".UserID = %s
- AND "Episodes".EpisodePubDate >= NOW() - INTERVAL '7 days'
- ORDER BY "Episodes".EpisodePubDate DESC
- LIMIT 10
- """
- else: # MySQL or MariaDB
- recent_query = """
- SELECT
- Episodes.EpisodeID,
- Episodes.EpisodeTitle,
- Episodes.EpisodePubDate,
- Episodes.EpisodeDescription,
- Episodes.EpisodeArtwork,
- Episodes.EpisodeURL,
- Episodes.EpisodeDuration,
- Episodes.Completed,
- Podcasts.PodcastName,
- Podcasts.PodcastID,
- Podcasts.IsYouTubeChannel as is_youtube,
- UserEpisodeHistory.ListenDuration,
- CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded
- FROM Episodes
- INNER JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- LEFT JOIN UserEpisodeHistory ON
- Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
- AND UserEpisodeHistory.UserID = %s
- LEFT JOIN SavedEpisodes ON
- Episodes.EpisodeID = SavedEpisodes.EpisodeID
- AND SavedEpisodes.UserID = %s
- LEFT JOIN EpisodeQueue ON
- Episodes.EpisodeID = EpisodeQueue.EpisodeID
- AND EpisodeQueue.UserID = %s
- LEFT JOIN DownloadedEpisodes ON
- Episodes.EpisodeID = DownloadedEpisodes.EpisodeID
- AND DownloadedEpisodes.UserID = %s
- WHERE Podcasts.UserID = %s
- AND Episodes.EpisodePubDate >= DATE_SUB(NOW(), INTERVAL 7 DAY)
- ORDER BY Episodes.EpisodePubDate DESC
- LIMIT 10
- """
-
- # In Progress Episodes query with is_youtube field
- in_progress_query = """
- SELECT
- "Episodes".*,
- "Podcasts".PodcastName,
- "Podcasts".IsYouTubeChannel as is_youtube,
- "UserEpisodeHistory".ListenDuration,
- CASE WHEN "SavedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN "EpisodeQueue".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN "DownloadedEpisodes".EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded
- FROM "UserEpisodeHistory"
- JOIN "Episodes" ON "UserEpisodeHistory".EpisodeID = "Episodes".EpisodeID
- JOIN "Podcasts" ON "Episodes".PodcastID = "Podcasts".PodcastID
- LEFT JOIN "SavedEpisodes" ON
- "Episodes".EpisodeID = "SavedEpisodes".EpisodeID
- AND "SavedEpisodes".UserID = %s
- LEFT JOIN "EpisodeQueue" ON
- "Episodes".EpisodeID = "EpisodeQueue".EpisodeID
- AND "EpisodeQueue".UserID = %s
- LEFT JOIN "DownloadedEpisodes" ON
- "Episodes".EpisodeID = "DownloadedEpisodes".EpisodeID
- AND "DownloadedEpisodes".UserID = %s
- WHERE "UserEpisodeHistory".UserID = %s
- AND "UserEpisodeHistory".ListenDuration > 0
- AND "Episodes".Completed = FALSE
- ORDER BY "UserEpisodeHistory".ListenDate DESC
- LIMIT 10
- """ if database_type == "postgresql" else """
- SELECT
- Episodes.*,
- Podcasts.PodcastName,
- Podcasts.IsYouTubeChannel as is_youtube,
- UserEpisodeHistory.ListenDuration,
- CASE WHEN SavedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS saved,
- CASE WHEN EpisodeQueue.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS queued,
- CASE WHEN DownloadedEpisodes.EpisodeID IS NOT NULL THEN TRUE ELSE FALSE END AS downloaded
- FROM UserEpisodeHistory
- JOIN Episodes ON UserEpisodeHistory.EpisodeID = Episodes.EpisodeID
- JOIN Podcasts ON Episodes.PodcastID = Podcasts.PodcastID
- LEFT JOIN SavedEpisodes ON
- Episodes.EpisodeID = SavedEpisodes.EpisodeID
- AND SavedEpisodes.UserID = %s
- LEFT JOIN EpisodeQueue ON
- Episodes.EpisodeID = EpisodeQueue.EpisodeID
- AND EpisodeQueue.UserID = %s
- LEFT JOIN DownloadedEpisodes ON
- Episodes.EpisodeID = DownloadedEpisodes.EpisodeID
- AND DownloadedEpisodes.UserID = %s
- WHERE UserEpisodeHistory.UserID = %s
- AND UserEpisodeHistory.ListenDuration > 0
- AND Episodes.Completed = FALSE
- ORDER BY UserEpisodeHistory.ListenDate DESC
- LIMIT 10
- """
-
- # Top Podcasts query with all needed fields
- top_podcasts_query = """
- SELECT
- "Podcasts".PodcastID,
- "Podcasts".PodcastName,
- "Podcasts".PodcastIndexID,
- "Podcasts".ArtworkURL,
- "Podcasts".Author,
- "Podcasts".Categories,
- "Podcasts".Description,
- "Podcasts".EpisodeCount,
- "Podcasts".FeedURL,
- "Podcasts".WebsiteURL,
- "Podcasts".Explicit,
- "Podcasts".IsYouTubeChannel as is_youtube,
- COUNT(DISTINCT "UserEpisodeHistory".EpisodeID) as play_count,
- SUM("UserEpisodeHistory".ListenDuration) as total_listen_time
- FROM "Podcasts"
- LEFT JOIN "Episodes" ON "Podcasts".PodcastID = "Episodes".PodcastID
- LEFT JOIN "UserEpisodeHistory" ON "Episodes".EpisodeID = "UserEpisodeHistory".EpisodeID
- WHERE "Podcasts".UserID = %s
- GROUP BY "Podcasts".PodcastID
- ORDER BY total_listen_time DESC NULLS LAST
- LIMIT 6
- """ if database_type == "postgresql" else """
- SELECT
- Podcasts.PodcastID,
- Podcasts.PodcastName,
- Podcasts.PodcastIndexID,
- Podcasts.ArtworkURL,
- Podcasts.Author,
- Podcasts.Categories,
- Podcasts.Description,
- Podcasts.EpisodeCount,
- Podcasts.FeedURL,
- Podcasts.WebsiteURL,
- Podcasts.Explicit,
- Podcasts.IsYouTubeChannel as is_youtube,
- COUNT(DISTINCT UserEpisodeHistory.EpisodeID) as play_count,
- SUM(UserEpisodeHistory.ListenDuration) as total_listen_time
- FROM Podcasts
- LEFT JOIN Episodes ON Podcasts.PodcastID = Episodes.PodcastID
- LEFT JOIN UserEpisodeHistory ON Episodes.EpisodeID = UserEpisodeHistory.EpisodeID
- WHERE Podcasts.UserID = %s
- GROUP BY Podcasts.PodcastID
- ORDER BY total_listen_time DESC
- LIMIT 5
- """
-
- try:
- # Get recent episodes - need to pass 5 parameters as we have 5 placeholders
- cursor.execute(recent_query, (user_id, user_id, user_id, user_id, user_id))
- recent_results = cursor.fetchall()
- if recent_results is not None:
- home_data["recent_episodes"] = lowercase_keys(recent_results)
-
- # Get in progress episodes - need to pass 4 parameters as we have 4 placeholders
- cursor.execute(in_progress_query, (user_id, user_id, user_id, user_id))
- in_progress_results = cursor.fetchall()
- if in_progress_results is not None:
- home_data["in_progress_episodes"] = lowercase_keys(in_progress_results)
-
- # Get top podcasts
- cursor.execute(top_podcasts_query, (user_id,))
- top_podcasts_results = cursor.fetchall()
- if top_podcasts_results is not None:
- home_data["top_podcasts"] = lowercase_keys(top_podcasts_results)
-
- # Get counts
- if database_type == "postgresql":
- for table, key in [
- ("SavedEpisodes", "saved_count"),
- ("DownloadedEpisodes", "downloaded_count"),
- ("EpisodeQueue", "queue_count")
- ]:
- count_query = f'SELECT COUNT(*) FROM "{table}" WHERE userid = %s'
- cursor.execute(count_query, (user_id,))
- count_result = cursor.fetchone()
- if count_result is not None:
- home_data[key] = count_result[0] if isinstance(count_result, tuple) else count_result.get('count', 0)
-
- except Exception as e:
- print(f"Error fetching home overview: {e}")
- print(f"Error type: {type(e)}")
- import traceback
- traceback.print_exc()
- return None
- finally:
- cursor.close()
-
- if database_type != "postgresql":
- home_data = convert_booleans(home_data)
-
- return lowercase_keys(home_data)
-
-def get_playback_speed(cnx, database_type: str, user_id: int, is_youtube: bool, podcast_id: Optional[int] = None) -> float:
- cursor = cnx.cursor()
- if database_type == "postgresql":
- if podcast_id is None:
- query = 'SELECT PlaybackSpeed FROM "Users" WHERE UserID = %s'
- else:
- query = 'SELECT PlaybackSpeed FROM "Podcasts" WHERE PodcastID = %s'
- else:
- if podcast_id is None:
- query = 'SELECT PlaybackSpeed FROM Users WHERE UserID = %s'
- else:
- query = 'SELECT PlaybackSpeed FROM Podcasts WHERE PodcastID = %s'
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if result:
- # Handle both tuple and dictionary return types with case variations
- if isinstance(result, dict):
- # Try both cases for PostgreSQL vs MySQL/MariaDB
- return result.get('PlaybackSpeed') or result.get('playbackspeed') or 1.0
- else:
- return result[0] if result[0] is not None else 1.0
- return 1.0
diff --git a/database_functions/gpodder_router.py b/database_functions/gpodder_router.py
deleted file mode 100644
index d813b720..00000000
--- a/database_functions/gpodder_router.py
+++ /dev/null
@@ -1,669 +0,0 @@
-from fastapi import APIRouter, Depends, HTTPException, status, Request, Response
-from pydantic import BaseModel
-from typing import List, Dict, Optional, Any
-import sys
-import base64
-
-# Internal Modules
-sys.path.append('/pinepods')
-
-import database_functions.functions
-from database_functions.db_client import get_database_connection, database_type
-
-# Create models for the API
-class DeviceCreate(BaseModel):
- user_id: int
- device_name: str
- device_type: Optional[str] = "desktop"
- device_caption: Optional[str] = None
-
-class Device(BaseModel):
- id: int
- name: str
- type: str
- caption: Optional[str] = None
- last_sync: Optional[str] = None
- is_active: bool = True
- is_remote: Optional[bool] = False
- is_default: Optional[bool] = False
-
-class SyncRequest(BaseModel):
- user_id: int
- device_id: Optional[int] = None
- device_name: Optional[str] = None
- is_remote: bool = False
-
-class ApiResponse(BaseModel):
- success: bool
- message: str
- data: Optional[Any] = None
-
-# Create the router
-gpodder_router = APIRouter(prefix="/api/gpodder", tags=["gpodder"])
-
-# Authentication function (assumed to be defined elsewhere)
-async def get_api_key_from_header(request: Request):
- api_key = request.headers.get("Api-Key")
- if not api_key:
- raise HTTPException(status_code=403, detail="API key is required")
- return api_key
-
-async def has_elevated_access(api_key: str, cnx):
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- is_admin = database_functions.functions.user_admin_check(cnx, database_type, user_id)
- return is_admin
-
-@gpodder_router.get("/devices/{user_id}", response_model=List[Device])
-async def get_user_devices_endpoint(
- user_id: int,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Get all GPodder devices for a user (both local and remote)"""
- import logging
- import requests
- from requests.auth import HTTPBasicAuth
-
- logger = logging.getLogger(__name__)
-
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if the user has permission
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if user_id != user_id_from_api_key:
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to access these devices"
- )
-
- # Get local devices with our updated function that handles datetime conversion
- local_devices = database_functions.functions.get_user_devices(cnx, database_type, user_id)
-
- # Create a default device if no local devices exist
- if not local_devices:
- default_device_id = database_functions.functions.get_or_create_default_device(cnx, database_type, user_id)
- if default_device_id:
- local_devices = database_functions.functions.get_user_devices(cnx, database_type, user_id)
-
- # Get GPodder settings to fetch remote devices
- cursor = cnx.cursor()
- try:
- if database_type == "postgresql":
- query = 'SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM "Users" WHERE UserID = %s'
- else:
- query = "SELECT GpodderUrl, GpodderLoginName, GpodderToken FROM Users WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
-
- if not result:
- logger.warning(f"User {user_id} not found or has no GPodder settings")
- return local_devices
-
- if isinstance(result, dict):
- gpodder_url = result["gpodderurl"]
- gpodder_login = result["gpodderloginname"]
- encrypted_token = result["gpoddertoken"]
- else:
- gpodder_url = result[0]
- gpodder_login = result[1]
- encrypted_token = result[2]
-
- # If no GPodder settings, return only local devices
- if not gpodder_url or not gpodder_login:
- logger.warning(f"User {user_id} has no GPodder settings")
- return local_devices
-
- # Decrypt the token
- from cryptography.fernet import Fernet
- encryption_key = database_functions.functions.get_encryption_key(cnx, database_type)
- encryption_key_bytes = base64.b64decode(encryption_key)
- cipher_suite = Fernet(encryption_key_bytes)
-
- if encrypted_token:
- decrypted_token_bytes = cipher_suite.decrypt(encrypted_token.encode())
- gpodder_token = decrypted_token_bytes.decode()
- else:
- gpodder_token = None
-
- # Create auth for requests
- auth = HTTPBasicAuth(gpodder_login, gpodder_token)
-
- # Try to fetch remote devices
- session = requests.Session()
-
- # First login to establish session
- login_url = f"{gpodder_url}/api/2/auth/{gpodder_login}/login.json"
- logger.info(f"Logging in to fetch devices: {login_url}")
-
- login_response = session.post(login_url, auth=auth)
- login_response.raise_for_status()
-
- # Fetch devices from server
- devices_url = f"{gpodder_url}/api/2/devices/{gpodder_login}.json"
- logger.info(f"Fetching devices from: {devices_url}")
-
- devices_response = session.get(devices_url, auth=auth)
-
- if devices_response.status_code == 200:
- try:
- # Parse remote devices
- remote_devices = devices_response.json()
- logger.info(f"Found {len(remote_devices)} remote devices")
-
- # Create a map of local devices by name for quick lookup
- local_devices_by_name = {device["name"]: device for device in local_devices}
-
- # Process remote devices
- for remote_device in remote_devices:
- # Extract device information
- remote_name = remote_device.get("id", "")
-
- # Skip if we already have this device locally
- if remote_name in local_devices_by_name:
- continue
-
- # Convert to our format
- device_info = {
- "id": -1, # Use -1 to indicate it's a remote device not in our DB yet
- "name": remote_name,
- "type": remote_device.get("type", "unknown"),
- "caption": remote_device.get("caption", None),
- "last_sync": None, # We don't have this info
- "is_active": True,
- "is_remote": True # Flag to indicate it's a remote device
- }
-
- # Add to our list
- local_devices.append(device_info)
-
- logger.info(f"Returning {len(local_devices)} total devices")
- return local_devices
-
- except Exception as e:
- logger.error(f"Error parsing remote devices: {e}")
- # Return only local devices on error
- return local_devices
- else:
- logger.warning(f"Failed to fetch remote devices: {devices_response.status_code}")
- # Return only local devices on error
- return local_devices
-
- except Exception as e:
- logger.error(f"Error fetching devices: {e}")
- return local_devices
- finally:
- cursor.close()
-
-@gpodder_router.get("/default_device", response_model=Device)
-async def get_default_device_endpoint_new(
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Get the default GPodder device for the user"""
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
- # Get user ID from API key
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- # Get the default device
- device = database_functions.functions.get_default_gpodder_device(cnx, database_type, user_id)
- if device:
- return Device(
- id=device["id"],
- name=device["name"],
- type=device["type"],
- caption=device["caption"],
- last_sync=device["last_sync"].isoformat() if device["last_sync"] else None,
- is_active=device["is_active"],
- is_remote=device["is_remote"],
- is_default=device["is_default"]
- )
- else:
- raise HTTPException(
- status_code=404,
- detail="No default GPodder device found"
- )
-
-@gpodder_router.post("/set_default/{device_id}", response_model=ApiResponse)
-async def set_default_device_endpoint_new(
- device_id: int,
- device_name: Optional[str] = None,
- is_remote: bool = False,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Set a GPodder device as the default for the user"""
- import logging
- logger = logging.getLogger(__name__)
-
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Get user ID from API key
- user_id = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
-
- # Log information for debugging
- logger.info(f"Setting default device with ID: {device_id}, name: {device_name}, is_remote: {is_remote}")
-
- # Handle remote devices (negative IDs)
- if device_id < 0:
- if not device_name:
- # For remote devices, we need the device name
- raise HTTPException(
- status_code=400,
- detail="Device name is required for remote devices"
- )
-
- # Use the dedicated function to handle remote devices
- success, message, _ = database_functions.functions.handle_remote_device(
- cnx, database_type, user_id, device_name
- )
-
- if not success:
- raise HTTPException(
- status_code=500,
- detail=message
- )
-
- return ApiResponse(
- success=True,
- message="Default GPodder device set successfully"
- )
- else:
- # For local devices, proceed normally
- success = database_functions.functions.set_default_gpodder_device(cnx, database_type, user_id, device_id)
-
- if success:
- return ApiResponse(
- success=True,
- message="Default GPodder device set successfully"
- )
- else:
- raise HTTPException(
- status_code=400,
- detail="Failed to set default GPodder device"
- )
-
-@gpodder_router.post("/devices", response_model=Device)
-async def create_device(
- device: DeviceCreate,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Create a new GPodder device for a user"""
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if the user has permission
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if device.user_id != user_id_from_api_key:
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to create devices for this user"
- )
-
- # Create device
- device_id = database_functions.functions.create_or_update_device(
- cnx,
- database_type,
- device.user_id,
- device.device_name,
- device.device_type,
- device.device_caption
- )
-
- if not device_id:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to create device"
- )
-
- # Get the created device
- devices = database_functions.functions.get_user_devices(cnx, database_type, device.user_id)
- for d in devices:
- if d["id"] == device_id:
- return d
-
- # This should not happen
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Device created but not found"
- )
-
-@gpodder_router.post("/sync/force", response_model=ApiResponse)
-async def force_full_sync(
- sync_request: SyncRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Force a full sync of all local podcasts to the GPodder server"""
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if the user has permission
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if sync_request.user_id != user_id_from_api_key:
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to force sync for this user"
- )
-
- # Get GPodder settings
- user_id = sync_request.user_id
- gpodder_settings = database_functions.functions.get_gpodder_settings(database_type, cnx, user_id)
-
- if not gpodder_settings or not gpodder_settings.get("gpodderurl"):
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="GPodder settings not configured for this user"
- )
-
- # Get login name
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT GpodderLoginName FROM "Users" WHERE UserID = %s'
- else:
- query = "SELECT GpodderLoginName FROM Users WHERE UserID = %s"
-
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
-
- if not result:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="User not found"
- )
-
- gpodder_login = result[0] if isinstance(result, tuple) else result["gpodderloginname"]
-
- # Force sync
- success = database_functions.functions.force_full_sync_to_gpodder(
- database_type,
- cnx,
- user_id,
- gpodder_settings.get("gpodderurl"),
- gpodder_settings.get("gpoddertoken"),
- gpodder_login,
- sync_request.device_id, # Pass device_id from request
- sync_request.device_name, # Pass device_name from request
- sync_request.is_remote # Pass is_remote from request
- )
-
- if not success:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to force synchronization"
- )
-
- return ApiResponse(
- success=True,
- message="Successfully synchronized all podcasts to GPodder"
- )
-
-@gpodder_router.post("/sync", response_model=ApiResponse)
-async def sync_with_gpodder(
- sync_request: SyncRequest,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Sync podcasts from GPodder to local database"""
- print('running a sync')
- user_id = sync_request.user_id
- # Get user information
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = '''
- SELECT GpodderLoginName, Pod_Sync_Type, GpodderUrl, GpodderToken, Username
- FROM "Users"
- WHERE UserID = %s
- '''
- else:
- query = '''
- SELECT GpodderLoginName, Pod_Sync_Type, GpodderUrl, GpodderToken, Username
- FROM Users
- WHERE UserID = %s
- '''
- cursor.execute(query, (user_id,))
- result = cursor.fetchone()
- cursor.close()
- if not result:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="User not found"
- )
- print('grabbing user data')
- # Extract user data
- if isinstance(result, tuple):
- gpodder_login = result[0]
- pod_sync_type = result[1]
- gpodder_url = result[2]
- gpodder_token = result[3]
- username = result[4]
- else:
- gpodder_login = result["gpodderloginname"]
- pod_sync_type = result["pod_sync_type"]
- gpodder_url = result["gpodderurl"]
- gpodder_token = result["gpoddertoken"]
- username = result["username"]
- # Check if GPodder sync is enabled
- if pod_sync_type not in ["gpodder", "both", "external"]:
- raise HTTPException(
- status_code=status.HTTP_400_BAD_REQUEST,
- detail="GPodder sync not enabled for this user"
- )
- # Check if this is internal sync (local gpodder API)
- is_internal = gpodder_url == "http://localhost:8042"
- print(f"Syncing with device_id: {sync_request.device_id}, device_name: {sync_request.device_name}, "
- f"is_remote: {sync_request.is_remote}, is_internal: {is_internal}")
- # For external sync, use the function with the appropriate parameters
- print(f"Using external sync method for user {user_id} with URL {gpodder_url}")
- # Get encryption key for token handling
- cursor = cnx.cursor()
- if database_type == "postgresql":
- query = 'SELECT EncryptionKey FROM "AppSettings" WHERE AppSettingsID = 1'
- else:
- query = "SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1"
- cursor.execute(query)
- encryption_key_result = cursor.fetchone()
- cursor.close()
- encryption_key = None
- if encryption_key_result:
- if isinstance(encryption_key_result, tuple):
- encryption_key = encryption_key_result[0]
- else:
- encryption_key = encryption_key_result["encryptionkey"]
- print('Now doing refresh')
- success = database_functions.functions.refresh_gpodder_subscription(
- database_type,
- cnx,
- user_id,
- gpodder_url,
- gpodder_token,
- gpodder_login,
- pod_sync_type,
- sync_request.device_id,
- sync_request.device_name,
- sync_request.is_remote
- )
- print('refresh done')
- if not success:
- raise HTTPException(
- status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
- detail="Failed to synchronize with GPodder"
- )
- return ApiResponse(
- success=True,
- message="Successfully synchronized with GPodder"
- )
-
-@gpodder_router.get("/test-connection", response_model=ApiResponse)
-async def test_gpodder_connection(
- user_id: int,
- gpodder_url: str,
- gpodder_username: str,
- gpodder_password: str,
- cnx=Depends(get_database_connection),
- api_key: str = Depends(get_api_key_from_header)
-):
- """Test connection to GPodder server"""
- import requests
- from requests.auth import HTTPBasicAuth
- import logging
-
- logger = logging.getLogger(__name__)
-
- is_valid_key = database_functions.functions.verify_api_key(cnx, database_type, api_key)
- if not is_valid_key:
- raise HTTPException(
- status_code=403,
- detail="Your API key is either invalid or does not have correct permission"
- )
-
- # Check if the user has permission
- elevated_access = await has_elevated_access(api_key, cnx)
- if not elevated_access:
- user_id_from_api_key = database_functions.functions.id_from_api_key(cnx, database_type, api_key)
- if user_id != user_id_from_api_key:
- raise HTTPException(
- status_code=status.HTTP_403_FORBIDDEN,
- detail="You are not authorized to test connection for this user"
- )
-
- try:
- # Create a session and save cookies
- session = requests.Session()
- auth = HTTPBasicAuth(gpodder_username, gpodder_password)
-
- # Step 1: Login
- login_url = f"{gpodder_url}/api/2/auth/{gpodder_username}/login.json"
- logger.info(f"Testing login at: {login_url}")
-
- login_response = session.post(login_url, auth=auth)
- if login_response.status_code != 200:
- logger.error(f"Login failed: {login_response.status_code} - {login_response.text}")
- return ApiResponse(
- success=False,
- message=f"Failed to login to GPodder server: {login_response.status_code} {login_response.reason}",
- data=None
- )
-
- logger.info(f"Login successful: {login_response.status_code}")
- logger.info(f"Cookies after login: {session.cookies.get_dict()}")
-
- # Try multiple approaches to verify subscription access
-
- # 1. First try to get devices (no device parameter needed)
- logger.info("Attempting to get list of devices...")
- devices_url = f"{gpodder_url}/api/2/devices/{gpodder_username}.json"
- devices_response = session.get(devices_url)
-
- if devices_response.status_code == 200:
- logger.info(f"Devices fetch successful: {devices_response.status_code}")
-
- try:
- devices_data = devices_response.json()
- logger.info(f"Found {len(devices_data)} devices")
-
- # If devices exist, try to use the first one
- if devices_data and len(devices_data) > 0:
- device_id = devices_data[0].get('id', 'default')
- logger.info(f"Using existing device: {device_id}")
-
- # Try to get subscriptions with this device
- device_subs_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}/{device_id}.json?since=0"
- device_subs_response = session.get(device_subs_url)
-
- if device_subs_response.status_code == 200:
- return ApiResponse(
- success=True,
- message="Successfully connected to GPodder server and verified access using existing device.",
- data={
- "auth_type": "session",
- "device_id": device_id,
- "has_devices": True
- }
- )
- except Exception as device_err:
- logger.warning(f"Error parsing devices: {str(device_err)}")
-
- # 2. Try with "default" device name
- device_name = "default"
- subscriptions_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}/{device_name}.json?since=0"
- logger.info(f"Checking subscriptions with default device: {subscriptions_url}")
-
- subscriptions_response = session.get(subscriptions_url)
- if subscriptions_response.status_code == 200:
- logger.info(f"Subscriptions check successful with default device: {subscriptions_response.status_code}")
-
- return ApiResponse(
- success=True,
- message="Successfully connected to GPodder server and verified access with default device.",
- data={
- "auth_type": "session",
- "device_name": device_name
- }
- )
-
- # 3. As a last resort, try without device name
- simple_url = f"{gpodder_url}/api/2/subscriptions/{gpodder_username}.json"
- logger.info(f"Checking subscriptions without device: {simple_url}")
-
- simple_response = session.get(simple_url)
- if simple_response.status_code == 200:
- logger.info(f"Subscriptions check successful without device: {simple_response.status_code}")
-
- return ApiResponse(
- success=True,
- message="Successfully connected to GPodder server and verified access. No device required.",
- data={
- "auth_type": "session",
- "device_required": False
- }
- )
-
- # If we got here, login worked but subscription access didn't
- logger.warning("Login successful but couldn't access subscriptions with any method")
- return ApiResponse(
- success=True,
- message="Connected to GPodder server but couldn't verify subscription access. Login credentials are valid.",
- data={
- "auth_type": "session",
- "warning": "Could not verify subscription access"
- }
- )
-
- except Exception as e:
- logger.error(f"Connection test failed: {str(e)}")
- return ApiResponse(
- success=False,
- message=f"Failed to connect to GPodder server: {str(e)}",
- data=None
- )
diff --git a/database_functions/import_progress.py b/database_functions/import_progress.py
deleted file mode 100644
index c6d9870a..00000000
--- a/database_functions/import_progress.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import json
-from typing import Tuple
-from database_functions.valkey_client import valkey_client
-
-class ImportProgressManager:
- def start_import(self, user_id: int, total_podcasts: int):
- valkey_client.set(f"import_progress:{user_id}", json.dumps({
- "current": 0,
- "total": total_podcasts,
- "current_podcast": ""
- }))
-
- def update_progress(self, user_id: int, current: int, current_podcast: str):
- progress_json = valkey_client.get(f"import_progress:{user_id}")
- if progress_json:
- progress = json.loads(progress_json)
- progress.update({
- "current": current,
- "current_podcast": current_podcast
- })
- valkey_client.set(f"import_progress:{user_id}", json.dumps(progress))
-
- def get_progress(self, user_id: int) -> Tuple[int, int, str]:
- progress_json = valkey_client.get(f"import_progress:{user_id}")
- if progress_json:
- progress = json.loads(progress_json)
- return (progress.get("current", 0),
- progress.get("total", 0),
- progress.get("current_podcast", ""))
- return (0, 0, "")
-
- def clear_progress(self, user_id: int):
- valkey_client.delete(f"import_progress:{user_id}")
-
-import_progress_manager = ImportProgressManager()
diff --git a/database_functions/migration_definitions.py b/database_functions/migration_definitions.py
index 4b08e6ef..a9fa4584 100644
--- a/database_functions/migration_definitions.py
+++ b/database_functions/migration_definitions.py
@@ -1958,12 +1958,2130 @@ def migration_018_gpodder_sync_timestamp(conn, db_type: str):
cursor.close()
+@register_migration("019", "fix_encryption_key_storage", "Convert EncryptionKey from binary to text format for consistency", requires=["001"])
+def migration_019_fix_encryption_key_storage(conn, db_type: str):
+ """Convert EncryptionKey storage from binary to text format"""
+ cursor = conn.cursor()
+
+ try:
+ if db_type == "postgresql":
+ # First, get the current encryption key value as bytes
+ cursor.execute('SELECT encryptionkey FROM "AppSettings" WHERE appsettingsid = 1')
+ result = cursor.fetchone()
+
+ if result and result[0]:
+ # Convert bytes to string
+ key_bytes = result[0]
+ if isinstance(key_bytes, bytes):
+ key_string = key_bytes.decode('utf-8')
+ else:
+ key_string = str(key_bytes)
+
+ # Drop and recreate column as TEXT
+ cursor.execute('ALTER TABLE "AppSettings" DROP COLUMN encryptionkey')
+ cursor.execute('ALTER TABLE "AppSettings" ADD COLUMN encryptionkey TEXT')
+
+ # Insert the key back as text
+ cursor.execute('UPDATE "AppSettings" SET encryptionkey = %s WHERE appsettingsid = 1', (key_string,))
+ logger.info("Converted PostgreSQL encryptionkey from BYTEA to TEXT")
+ else:
+ # No existing key, just change the column type
+ cursor.execute('ALTER TABLE "AppSettings" DROP COLUMN encryptionkey')
+ cursor.execute('ALTER TABLE "AppSettings" ADD COLUMN encryptionkey TEXT')
+ logger.info("Changed PostgreSQL encryptionkey column to TEXT (no existing data)")
+
+ else: # MySQL
+ # First, get the current encryption key value
+ cursor.execute('SELECT EncryptionKey FROM AppSettings WHERE AppSettingsID = 1')
+ result = cursor.fetchone()
+
+ if result and result[0]:
+ # Convert binary to string
+ key_data = result[0]
+ if isinstance(key_data, bytes):
+ # Remove null padding and decode
+ key_string = key_data.rstrip(b'\x00').decode('utf-8')
+ else:
+ key_string = str(key_data)
+
+ # Change column type and update value
+ cursor.execute('ALTER TABLE AppSettings MODIFY EncryptionKey VARCHAR(255)')
+ cursor.execute('UPDATE AppSettings SET EncryptionKey = %s WHERE AppSettingsID = 1', (key_string,))
+ logger.info("Converted MySQL EncryptionKey from BINARY to VARCHAR")
+ else:
+ # No existing key, just change the column type
+ cursor.execute('ALTER TABLE AppSettings MODIFY EncryptionKey VARCHAR(255)')
+ logger.info("Changed MySQL EncryptionKey column to VARCHAR (no existing data)")
+
+ logger.info("Encryption key storage migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in encryption key migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("020", "add_default_gpodder_device", "Add DefaultGpodderDevice column to Users table for tracking user's selected GPodder device", requires=["001"])
+def migration_020_add_default_gpodder_device(conn, db_type: str):
+ """Add DefaultGpodderDevice column to Users table"""
+ cursor = conn.cursor()
+
+ try:
+ if db_type == "postgresql":
+ # Add defaultgpodderdevice column to Users table
+ safe_execute_sql(cursor, 'ALTER TABLE "Users" ADD COLUMN defaultgpodderdevice VARCHAR(255)')
+ logger.info("Added defaultgpodderdevice column to Users table (PostgreSQL)")
+
+ else: # MySQL
+ # Add DefaultGpodderDevice column to Users table
+ safe_execute_sql(cursor, 'ALTER TABLE Users ADD COLUMN DefaultGpodderDevice VARCHAR(255)')
+ logger.info("Added DefaultGpodderDevice column to Users table (MySQL)")
+
+ logger.info("Default GPodder device column migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in default GPodder device migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("021", "limit_system_playlists_episodes", "Add MaxEpisodes limit to high-volume system playlists", requires=["010"])
+def migration_021_limit_system_playlists_episodes(conn, db_type: str):
+ """Add MaxEpisodes limit to Commuter Mix, Longform, and Weekend Marathon system playlists"""
+ cursor = conn.cursor()
+
+ try:
+ logger.info("Starting system playlist episodes limit migration")
+
+ # Define the playlists to update with 1000 episode limit
+ playlists_to_update = ['Commuter Mix', 'Longform', 'Weekend Marathon']
+
+ if db_type == "postgresql":
+ for playlist_name in playlists_to_update:
+ safe_execute_sql(cursor, '''
+ UPDATE "Playlists"
+ SET maxepisodes = 1000
+ WHERE name = %s AND issystemplaylist = TRUE
+ ''', (playlist_name,))
+ logger.info(f"Updated {playlist_name} system playlist with maxepisodes=1000 (PostgreSQL)")
+
+ else: # MySQL
+ for playlist_name in playlists_to_update:
+ safe_execute_sql(cursor, '''
+ UPDATE Playlists
+ SET MaxEpisodes = 1000
+ WHERE Name = %s AND IsSystemPlaylist = TRUE
+ ''', (playlist_name,))
+ logger.info(f"Updated {playlist_name} system playlist with MaxEpisodes=1000 (MySQL)")
+
+ logger.info("System playlist episodes limit migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in system playlist episodes limit migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("022", "expand_downloaded_location_column", "Expand DownloadedLocation column size to handle long file paths", requires=["007"])
+def migration_022_expand_downloaded_location_column(conn, db_type: str):
+ """Expand DownloadedLocation column size to handle long file paths"""
+ cursor = conn.cursor()
+
+ try:
+ logger.info("Starting downloaded location column expansion migration")
+
+ if db_type == "postgresql":
+ # Expand DownloadedLocation in DownloadedEpisodes table
+ safe_execute_sql(cursor, '''
+ ALTER TABLE "DownloadedEpisodes"
+ ALTER COLUMN downloadedlocation TYPE TEXT
+ ''', conn=conn)
+ logger.info("Expanded downloadedlocation column in DownloadedEpisodes table (PostgreSQL)")
+
+ # Expand DownloadedLocation in DownloadedVideos table
+ safe_execute_sql(cursor, '''
+ ALTER TABLE "DownloadedVideos"
+ ALTER COLUMN downloadedlocation TYPE TEXT
+ ''', conn=conn)
+ logger.info("Expanded downloadedlocation column in DownloadedVideos table (PostgreSQL)")
+
+ else: # MySQL
+ # Expand DownloadedLocation in DownloadedEpisodes table
+ safe_execute_sql(cursor, '''
+ ALTER TABLE DownloadedEpisodes
+ MODIFY DownloadedLocation TEXT
+ ''', conn=conn)
+ logger.info("Expanded DownloadedLocation column in DownloadedEpisodes table (MySQL)")
+
+ # Expand DownloadedLocation in DownloadedVideos table
+ safe_execute_sql(cursor, '''
+ ALTER TABLE DownloadedVideos
+ MODIFY DownloadedLocation TEXT
+ ''', conn=conn)
+ logger.info("Expanded DownloadedLocation column in DownloadedVideos table (MySQL)")
+
+ logger.info("Downloaded location column expansion migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in downloaded location column expansion migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("023", "add_missing_performance_indexes", "Add missing performance indexes for queue, saved, downloaded, and history tables", requires=["006", "007"])
+def migration_023_add_missing_performance_indexes(conn, db_type: str):
+ """Add missing performance indexes for queue, saved, downloaded, and history tables"""
+ cursor = conn.cursor()
+
+ try:
+ logger.info("Starting missing performance indexes migration")
+
+ table_prefix = '"' if db_type == 'postgresql' else ''
+ table_suffix = '"' if db_type == 'postgresql' else ''
+
+ # EpisodeQueue indexes (critical for get_queued_episodes performance)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_userid ON {table_prefix}EpisodeQueue{table_suffix}(UserID)', 'idx_episodequeue_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_episodeid ON {table_prefix}EpisodeQueue{table_suffix}(EpisodeID)', 'idx_episodequeue_episodeid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_queueposition ON {table_prefix}EpisodeQueue{table_suffix}(QueuePosition)', 'idx_episodequeue_queueposition')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodequeue_userid_queueposition ON {table_prefix}EpisodeQueue{table_suffix}(UserID, QueuePosition)', 'idx_episodequeue_userid_queueposition')
+
+ # SavedEpisodes indexes (for return_episodes LEFT JOIN performance)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_userid ON {table_prefix}SavedEpisodes{table_suffix}(UserID)', 'idx_savedepisodes_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_episodeid ON {table_prefix}SavedEpisodes{table_suffix}(EpisodeID)', 'idx_savedepisodes_episodeid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedepisodes_userid_episodeid ON {table_prefix}SavedEpisodes{table_suffix}(UserID, EpisodeID)', 'idx_savedepisodes_userid_episodeid')
+
+ # SavedVideos indexes (for YouTube video queries)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_userid ON {table_prefix}SavedVideos{table_suffix}(UserID)', 'idx_savedvideos_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_videoid ON {table_prefix}SavedVideos{table_suffix}(VideoID)', 'idx_savedvideos_videoid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_savedvideos_userid_videoid ON {table_prefix}SavedVideos{table_suffix}(UserID, VideoID)', 'idx_savedvideos_userid_videoid')
+
+ # DownloadedEpisodes indexes (for return_episodes LEFT JOIN performance)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_userid ON {table_prefix}DownloadedEpisodes{table_suffix}(UserID)', 'idx_downloadedepisodes_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_episodeid ON {table_prefix}DownloadedEpisodes{table_suffix}(EpisodeID)', 'idx_downloadedepisodes_episodeid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedepisodes_userid_episodeid ON {table_prefix}DownloadedEpisodes{table_suffix}(UserID, EpisodeID)', 'idx_downloadedepisodes_userid_episodeid')
+
+ # DownloadedVideos indexes (for YouTube video queries)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_userid ON {table_prefix}DownloadedVideos{table_suffix}(UserID)', 'idx_downloadedvideos_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_videoid ON {table_prefix}DownloadedVideos{table_suffix}(VideoID)', 'idx_downloadedvideos_videoid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_downloadedvideos_userid_videoid ON {table_prefix}DownloadedVideos{table_suffix}(UserID, VideoID)', 'idx_downloadedvideos_userid_videoid')
+
+ # UserEpisodeHistory indexes (for return_episodes LEFT JOIN performance)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_userid ON {table_prefix}UserEpisodeHistory{table_suffix}(UserID)', 'idx_userepisodehistory_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_episodeid ON {table_prefix}UserEpisodeHistory{table_suffix}(EpisodeID)', 'idx_userepisodehistory_episodeid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_userepisodehistory_userid_episodeid ON {table_prefix}UserEpisodeHistory{table_suffix}(UserID, EpisodeID)', 'idx_userepisodehistory_userid_episodeid')
+
+ # UserVideoHistory indexes (for YouTube video queries)
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_uservideohistory_userid ON {table_prefix}UserVideoHistory{table_suffix}(UserID)', 'idx_uservideohistory_userid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_uservideohistory_videoid ON {table_prefix}UserVideoHistory{table_suffix}(VideoID)', 'idx_uservideohistory_videoid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_uservideohistory_userid_videoid ON {table_prefix}UserVideoHistory{table_suffix}(UserID, VideoID)', 'idx_uservideohistory_userid_videoid')
+
+ # Additional useful indexes for query performance
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_episodes_completed ON {table_prefix}Episodes{table_suffix}(Completed)', 'idx_episodes_completed')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_completed ON {table_prefix}YouTubeVideos{table_suffix}(Completed)', 'idx_youtubevideos_completed')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_podcastid ON {table_prefix}YouTubeVideos{table_suffix}(PodcastID)', 'idx_youtubevideos_podcastid')
+ safe_add_index(cursor, db_type, f'CREATE INDEX idx_youtubevideos_publishedat ON {table_prefix}YouTubeVideos{table_suffix}(PublishedAt)', 'idx_youtubevideos_publishedat')
+
+ logger.info("Missing performance indexes migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in missing performance indexes migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("025", "fix_people_table_columns", "Add missing PersonImg, PeopleDBID, and AssociatedPodcasts columns to existing People tables", requires=["009"])
+def migration_025_fix_people_table_columns(conn, db_type: str):
+ """Add missing columns to existing People tables for users who upgraded from older versions"""
+ cursor = conn.cursor()
+
+ try:
+ logger.info("Starting People table columns fix migration")
+
+ if db_type == "postgresql":
+ # Check if PersonImg column exists, if not add it
+ safe_execute_sql(cursor, '''
+ DO $$
+ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'People' AND column_name = 'personimg'
+ ) THEN
+ ALTER TABLE "People" ADD COLUMN PersonImg TEXT;
+ END IF;
+ END $$;
+ ''', conn=conn)
+
+ # Check if PeopleDBID column exists, if not add it
+ safe_execute_sql(cursor, '''
+ DO $$
+ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'People' AND column_name = 'peopledbid'
+ ) THEN
+ ALTER TABLE "People" ADD COLUMN PeopleDBID INT;
+ END IF;
+ END $$;
+ ''', conn=conn)
+
+ # Check if AssociatedPodcasts column exists, if not add it
+ safe_execute_sql(cursor, '''
+ DO $$
+ BEGIN
+ IF NOT EXISTS (
+ SELECT 1 FROM information_schema.columns
+ WHERE table_name = 'People' AND column_name = 'associatedpodcasts'
+ ) THEN
+ ALTER TABLE "People" ADD COLUMN AssociatedPodcasts TEXT;
+ END IF;
+ END $$;
+ ''', conn=conn)
+
+ logger.info("Added missing columns to People table (PostgreSQL)")
+
+ else: # MySQL
+ # For MySQL, use IF NOT EXISTS syntax or try-catch approach
+ try:
+ safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN PersonImg TEXT', conn=conn)
+ logger.info("Added PersonImg column to People table (MySQL)")
+ except Exception:
+ logger.debug("PersonImg column already exists in People table (MySQL)")
+
+ try:
+ safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN PeopleDBID INT', conn=conn)
+ logger.info("Added PeopleDBID column to People table (MySQL)")
+ except Exception:
+ logger.debug("PeopleDBID column already exists in People table (MySQL)")
+
+ try:
+ safe_execute_sql(cursor, 'ALTER TABLE People ADD COLUMN AssociatedPodcasts TEXT', conn=conn)
+ logger.info("Added AssociatedPodcasts column to People table (MySQL)")
+ except Exception:
+ logger.debug("AssociatedPodcasts column already exists in People table (MySQL)")
+
+ logger.info("People table columns fix migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in People table columns fix migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("026", "limit_quick_listens_episodes", "Add MaxEpisodes limit to Quick Listens system playlist", requires=["012"])
+def migration_026_limit_quick_listens_episodes(conn, db_type: str):
+ """Add MaxEpisodes limit to Quick Listens system playlist"""
+ cursor = conn.cursor()
+
+ try:
+ logger.info("Starting Quick Listens MaxEpisodes limit migration")
+
+ if db_type == "postgresql":
+ # Update Quick Listens playlist to have maxepisodes = 1000
+ safe_execute_sql(cursor, '''
+ UPDATE "Playlists"
+ SET maxepisodes = 1000
+ WHERE name = 'Quick Listens' AND issystemplaylist = TRUE
+ ''', conn=conn)
+ logger.info("Updated Quick Listens system playlist maxepisodes=1000 (PostgreSQL)")
+
+ else: # MySQL
+ # Update Quick Listens playlist to have MaxEpisodes = 1000
+ safe_execute_sql(cursor, '''
+ UPDATE Playlists
+ SET MaxEpisodes = 1000
+ WHERE Name = 'Quick Listens' AND IsSystemPlaylist = TRUE
+ ''', conn=conn)
+ logger.info("Updated Quick Listens system playlist MaxEpisodes=1000 (MySQL)")
+
+ logger.info("Quick Listens MaxEpisodes limit migration completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in Quick Listens MaxEpisodes limit migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+def register_all_migrations():
+    """Register all migrations with the migration manager.
+
+    Migrations self-register at import time via their @register_migration
+    decorators, so this function only logs.  That is also why it is harmless
+    that some migration definitions appear after this function in the module:
+    by the time anything calls it, every decorator has already run.
+    """
+    # Migrations are auto-registered via decorators
+    logger.info("All migrations registered")
+@register_migration("024", "fix_quick_listens_min_duration", "Update Quick Listens playlist to exclude 0-duration episodes", requires=["012"])
+def migration_024_fix_quick_listens_min_duration(conn, db_type: str):
+    """Update Quick Listens system playlist to exclude episodes with 0 duration.
+
+    Sets the playlist's minimum duration to 1 second so zero-length (often
+    malformed) episodes no longer match the smart-playlist filter.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging if the UPDATE fails.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting Quick Listens min duration fix migration")
+
+        if db_type == "postgresql":
+            # Update Quick Listens playlist to have min_duration = 1 second
+            safe_execute_sql(cursor, '''
+                UPDATE "Playlists"
+                SET minduration = 1
+                WHERE name = 'Quick Listens' AND issystemplaylist = TRUE
+            ''', conn=conn)
+            logger.info("Updated Quick Listens system playlist minduration=1 (PostgreSQL)")
+
+        else:  # MySQL
+            # Update Quick Listens playlist to have MinDuration = 1 second
+            safe_execute_sql(cursor, '''
+                UPDATE Playlists
+                SET MinDuration = 1
+                WHERE Name = 'Quick Listens' AND IsSystemPlaylist = TRUE
+            ''', conn=conn)
+            logger.info("Updated Quick Listens system playlist MinDuration=1 (MySQL)")
+
+        logger.info("Quick Listens min duration fix migration completed successfully")
+
+    except Exception as e:
+        logger.error(f"Error in Quick Listens min duration fix migration: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("027", "add_scheduled_backups_table", "Create ScheduledBackups table for automated backup management", requires=["026"])
+def migration_027_add_scheduled_backups_table(conn, db_type: str):
+    """Create ScheduledBackups table for automated backup management.
+
+    One row per user (enforced by a UNIQUE constraint on the user id) holding
+    a cron expression and an enabled flag; rows cascade-delete with the user.
+    An index on the enabled flag speeds up the scheduler's "which backups are
+    active" scan.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting ScheduledBackups table creation migration")
+
+        if db_type == "postgresql":
+            # Create ScheduledBackups table for PostgreSQL
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS "ScheduledBackups" (
+                    id SERIAL PRIMARY KEY,
+                    userid INTEGER NOT NULL,
+                    cron_schedule VARCHAR(50) NOT NULL,
+                    enabled BOOLEAN NOT NULL DEFAULT false,
+                    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    UNIQUE(userid),
+                    FOREIGN KEY (userid) REFERENCES "Users"(userid) ON DELETE CASCADE
+                )
+            ''', conn=conn)
+            logger.info("Created ScheduledBackups table (PostgreSQL)")
+
+            # Create index for performance
+            safe_execute_sql(cursor, '''
+                CREATE INDEX IF NOT EXISTS idx_scheduled_backups_enabled
+                ON "ScheduledBackups"(enabled)
+            ''', conn=conn)
+            logger.info("Created index on enabled column (PostgreSQL)")
+
+        else:  # MySQL
+            # Create ScheduledBackups table for MySQL
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS ScheduledBackups (
+                    ID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    CronSchedule VARCHAR(50) NOT NULL,
+                    Enabled BOOLEAN NOT NULL DEFAULT FALSE,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    UpdatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+                    UNIQUE KEY unique_user (UserID),
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE
+                )
+            ''', conn=conn)
+            logger.info("Created ScheduledBackups table (MySQL)")
+
+            # Create index for performance.
+            # NOTE(review): MySQL's CREATE INDEX has no IF NOT EXISTS, so on
+            # re-run this statement errors with "Duplicate key name" --
+            # presumably safe_execute_sql tolerates that; confirm.
+            safe_execute_sql(cursor, '''
+                CREATE INDEX idx_scheduled_backups_enabled
+                ON ScheduledBackups(Enabled)
+            ''', conn=conn)
+            logger.info("Created index on Enabled column (MySQL)")
+
+        logger.info("ScheduledBackups table creation migration completed successfully")
+
+    except Exception as e:
+        logger.error(f"Error in ScheduledBackups table creation migration: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("028", "add_ignore_podcast_index_column", "Add IgnorePodcastIndex column to Podcasts table", requires=["027"])
+def migration_028_add_ignore_podcast_index_column(conn, db_type: str):
+    """
+    Migration 028: Add IgnorePodcastIndex column to Podcasts table.
+
+    PostgreSQL uses ADD COLUMN IF NOT EXISTS (idempotent); the unquoted
+    identifier case-folds to ``ignorepodcastindex``, matching the lower-case
+    column convention of the other PostgreSQL migrations here.  MySQL has no
+    IF NOT EXISTS for ADD COLUMN, so the column's existence is checked via
+    information_schema first.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    logger.info("Starting migration 028: Add IgnorePodcastIndex column to Podcasts table")
+    cursor = conn.cursor()
+
+    try:
+        if db_type == 'postgresql':
+            safe_execute_sql(cursor, '''
+                ALTER TABLE "Podcasts"
+                ADD COLUMN IF NOT EXISTS IgnorePodcastIndex BOOLEAN DEFAULT FALSE
+            ''', conn=conn)
+            logger.info("Added IgnorePodcastIndex column to Podcasts table (PostgreSQL)")
+
+        else:  # MySQL
+            # Check if column already exists to avoid duplicate column error
+            safe_execute_sql(cursor, '''
+                SELECT COUNT(*)
+                FROM information_schema.columns
+                WHERE table_name = 'Podcasts'
+                AND column_name = 'IgnorePodcastIndex'
+                AND table_schema = DATABASE()
+            ''', conn=conn)
+
+            result = cursor.fetchone()
+            if result[0] == 0:  # Column doesn't exist
+                safe_execute_sql(cursor, '''
+                    ALTER TABLE Podcasts
+                    ADD COLUMN IgnorePodcastIndex TINYINT(1) DEFAULT 0
+                ''', conn=conn)
+                logger.info("Added IgnorePodcastIndex column to Podcasts table (MySQL)")
+            else:
+                logger.info("IgnorePodcastIndex column already exists in Podcasts table (MySQL)")
+
+        logger.info("IgnorePodcastIndex column migration completed successfully")
+
+    except Exception as e:
+        logger.error(f"Error in IgnorePodcastIndex column migration: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("029", "fix_people_episodes_table_schema", "Fix PeopleEpisodes table schema to match expected format", requires=["009"])
+def migration_029_fix_people_episodes_table_schema(conn, db_type: str):
+ """
+ Migration 029: Fix PeopleEpisodes table schema
+
+ This migration ensures the PeopleEpisodes table has the correct schema with all required columns.
+ Some databases may have an incomplete PeopleEpisodes table from migration 009.
+ """
+ logger.info("Starting migration 029: Fix PeopleEpisodes table schema")
+ cursor = conn.cursor()
+
+ try:
+ if db_type == 'postgresql':
+ # For PostgreSQL, we'll recreate the table with the correct schema
+ # First check if table exists and get its current structure
+ safe_execute_sql(cursor, '''
+ SELECT column_name
+ FROM information_schema.columns
+ WHERE table_name = 'PeopleEpisodes'
+ AND table_schema = current_schema()
+ ''', conn=conn)
+
+ existing_columns = [row[0] for row in cursor.fetchall()]
+
+ if 'podcastid' not in [col.lower() for col in existing_columns]:
+ logger.info("PeopleEpisodes table missing required columns, recreating...")
+
+ # Drop existing table if it exists with wrong schema
+ safe_execute_sql(cursor, 'DROP TABLE IF EXISTS "PeopleEpisodes"', conn=conn)
+
+ # Create with correct schema
+ safe_execute_sql(cursor, '''
+ CREATE TABLE "PeopleEpisodes" (
+ EpisodeID SERIAL PRIMARY KEY,
+ PersonID INT,
+ PodcastID INT,
+ EpisodeTitle TEXT,
+ EpisodeDescription TEXT,
+ EpisodeURL TEXT,
+ EpisodeArtwork TEXT,
+ EpisodePubDate TIMESTAMP,
+ EpisodeDuration INT,
+ AddedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (PersonID) REFERENCES "People"(PersonID),
+ FOREIGN KEY (PodcastID) REFERENCES "Podcasts"(PodcastID)
+ )
+ ''', conn=conn)
+ logger.info("Recreated PeopleEpisodes table with correct schema (PostgreSQL)")
+ else:
+ logger.info("PeopleEpisodes table already has correct schema (PostgreSQL)")
+
+ else: # MySQL
+ # For MySQL, check current table structure
+ safe_execute_sql(cursor, '''
+ SELECT column_name
+ FROM information_schema.columns
+ WHERE table_name = 'PeopleEpisodes'
+ AND table_schema = DATABASE()
+ ''', conn=conn)
+
+ existing_columns = [row[0] for row in cursor.fetchall()]
+ logger.info(f"Current PeopleEpisodes columns: {existing_columns}")
+
+ if 'PodcastID' not in existing_columns:
+ logger.info("PeopleEpisodes table missing required columns, recreating...")
+
+ # Backup any existing data first (if the table has useful data)
+ safe_execute_sql(cursor, '''
+ CREATE TABLE IF NOT EXISTS PeopleEpisodes_backup AS
+ SELECT * FROM PeopleEpisodes
+ ''', conn=conn)
+ logger.info("Created backup of existing PeopleEpisodes table")
+
+ # Drop existing table
+ safe_execute_sql(cursor, 'DROP TABLE IF EXISTS PeopleEpisodes', conn=conn)
+
+ # Create with correct schema
+ safe_execute_sql(cursor, '''
+ CREATE TABLE PeopleEpisodes (
+ EpisodeID INT AUTO_INCREMENT PRIMARY KEY,
+ PersonID INT,
+ PodcastID INT,
+ EpisodeTitle TEXT,
+ EpisodeDescription TEXT,
+ EpisodeURL TEXT,
+ EpisodeArtwork TEXT,
+ EpisodePubDate TIMESTAMP,
+ EpisodeDuration INT,
+ AddedDate TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+ FOREIGN KEY (PersonID) REFERENCES People(PersonID),
+ FOREIGN KEY (PodcastID) REFERENCES Podcasts(PodcastID)
+ )
+ ''', conn=conn)
+ logger.info("Recreated PeopleEpisodes table with correct schema (MySQL)")
+ else:
+ logger.info("PeopleEpisodes table already has correct schema (MySQL)")
+
+ logger.info("PeopleEpisodes table schema fix completed successfully")
+
+ except Exception as e:
+ logger.error(f"Error in PeopleEpisodes table schema fix migration: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("030", "add_user_language_preference", "Add Language column to Users table for user-specific language preferences", requires=["001"])
+def migration_030_add_user_language_preference(conn, db_type: str):
+ """Add Language column to Users table for user-specific language preferences"""
+ cursor = conn.cursor()
+
+ try:
+ # Get the default language from environment variable, fallback to 'en'
+ default_language = os.environ.get("DEFAULT_LANGUAGE", "en")
+
+ # Validate language code (basic validation)
+ if not default_language or len(default_language) > 10:
+ default_language = "en"
+
+ logger.info(f"Adding Language column to Users table with default '{default_language}'")
+
+ if db_type == 'postgresql':
+ # Add Language column with default from environment variable
+ safe_execute_sql(cursor, f'''
+ ALTER TABLE "Users"
+ ADD COLUMN IF NOT EXISTS Language VARCHAR(10) DEFAULT '{default_language}'
+ ''', conn=conn)
+
+ # Add comment to document the column
+ safe_execute_sql(cursor, '''
+ COMMENT ON COLUMN "Users".Language IS 'ISO 639-1 language code for user interface language preference'
+ ''', conn=conn)
+
+ else: # mysql/mariadb
+ # Check if column exists first
+ cursor.execute("""
+ SELECT COUNT(*)
+ FROM INFORMATION_SCHEMA.COLUMNS
+ WHERE TABLE_SCHEMA = DATABASE()
+ AND TABLE_NAME = 'Users'
+ AND COLUMN_NAME = 'Language'
+ """)
+
+ if cursor.fetchone()[0] == 0:
+ safe_execute_sql(cursor, f'''
+ ALTER TABLE Users
+ ADD COLUMN Language VARCHAR(10) DEFAULT '{default_language}'
+ COMMENT 'ISO 639-1 language code for user interface language preference'
+ ''', conn=conn)
+
+ logger.info(f"Successfully added Language column to Users table with default '{default_language}'")
+
+ except Exception as e:
+ logger.error(f"Error in migration 030: {e}")
+ raise
+ finally:
+ cursor.close()
+
+
+@register_migration("031", "add_oidc_env_initialized_column", "Add InitializedFromEnv column to OIDCProviders table to track env-initialized providers", requires=["001"])
+def migration_031_add_oidc_env_initialized_column(conn, db_type: str):
+    """Add InitializedFromEnv column to OIDCProviders table.
+
+    Tracks providers that were created from environment variables so the UI
+    can refuse to delete them.  Defaults to false/0 for all existing rows.
+    PostgreSQL uses the idempotent ADD COLUMN IF NOT EXISTS; MySQL is guarded
+    by an information_schema existence check instead.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL/MariaDB.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Adding InitializedFromEnv column to OIDCProviders table")
+
+        if db_type == 'postgresql':
+            # Add InitializedFromEnv column (defaults to false for existing providers)
+            safe_execute_sql(cursor, '''
+                ALTER TABLE "OIDCProviders"
+                ADD COLUMN IF NOT EXISTS InitializedFromEnv BOOLEAN DEFAULT false
+            ''', conn=conn)
+
+            # Add comment to document the column
+            safe_execute_sql(cursor, '''
+                COMMENT ON COLUMN "OIDCProviders".InitializedFromEnv IS 'Indicates if this provider was created from environment variables and should not be removable via UI'
+            ''', conn=conn)
+
+        else:  # mysql/mariadb
+            # Check if column exists first
+            cursor.execute("""
+                SELECT COUNT(*)
+                FROM INFORMATION_SCHEMA.COLUMNS
+                WHERE TABLE_SCHEMA = DATABASE()
+                AND TABLE_NAME = 'OIDCProviders'
+                AND COLUMN_NAME = 'InitializedFromEnv'
+            """)
+
+            if cursor.fetchone()[0] == 0:
+                safe_execute_sql(cursor, '''
+                    ALTER TABLE OIDCProviders
+                    ADD COLUMN InitializedFromEnv TINYINT(1) DEFAULT 0
+                    COMMENT 'Indicates if this provider was created from environment variables and should not be removable via UI'
+                ''', conn=conn)
+
+        logger.info("Successfully added InitializedFromEnv column to OIDCProviders table")
+    except Exception as e:
+        logger.error(f"Error in migration 031: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("032", "create_user_default_playlists", "Create default playlists for all existing users", requires=["012"])
+def migration_032_create_user_default_playlists(conn, db_type: str):
+    """Create default playlists for all existing users, eliminating system playlists.
+
+    Steps:
+      1. Add an episode-count column to Playlists if missing (episodecount on
+         PostgreSQL, EpisodeCount on MySQL).
+      2. For every user with id > 1 (id 1 is assumed to be the background
+         user), insert each of six default smart playlists unless a playlist
+         with the same name already exists for that user.  All are inserted
+         with IsSystemPlaylist = false.
+      3. Commit everything at the end.
+
+    Per-playlist insert failures are logged and skipped so one bad row does
+    not block the rest.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here,
+            and conn.commit() is called on success.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure outside the
+            per-playlist loop.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting user default playlists migration")
+
+        # First, add the episode_count column to Playlists table if it doesn't exist
+        if db_type == "postgresql":
+            # Check if episode_count column exists
+            cursor.execute("""
+                SELECT column_name FROM information_schema.columns
+                WHERE table_name = 'Playlists'
+                AND column_name = 'episodecount'
+            """)
+            column_exists = len(cursor.fetchall()) > 0
+
+            if not column_exists:
+                cursor.execute("""
+                    ALTER TABLE "Playlists"
+                    ADD COLUMN episodecount INTEGER DEFAULT 0
+                """)
+                logger.info("Added episode_count column to Playlists table (PostgreSQL)")
+            else:
+                logger.info("episode_count column already exists in Playlists table (PostgreSQL)")
+        else:
+            # Check if episode_count column exists (MySQL)
+            cursor.execute("""
+                SELECT COUNT(*)
+                FROM INFORMATION_SCHEMA.COLUMNS
+                WHERE TABLE_NAME = 'Playlists'
+                AND COLUMN_NAME = 'EpisodeCount'
+                AND TABLE_SCHEMA = DATABASE()
+            """)
+            column_exists = cursor.fetchone()[0] > 0
+
+            if not column_exists:
+                cursor.execute("""
+                    ALTER TABLE Playlists
+                    ADD COLUMN EpisodeCount INT DEFAULT 0
+                """)
+                logger.info("Added EpisodeCount column to Playlists table (MySQL)")
+            else:
+                logger.info("EpisodeCount column already exists in Playlists table (MySQL)")
+
+        # Define default playlists (same as migration 012 but will be assigned to each user).
+        # Missing keys fall back to defaults at insert time via dict.get().
+        default_playlists = [
+            {
+                'name': 'Quick Listens',
+                'description': 'Short episodes under 15 minutes, perfect for quick breaks',
+                'min_duration': 1,  # Exclude 0-duration episodes
+                'max_duration': 900,  # 15 minutes
+                'sort_order': 'duration_asc',
+                'icon_name': 'ph-fast-forward',
+                'max_episodes': 1000
+            },
+            {
+                'name': 'Longform',
+                'description': 'Extended episodes over 1 hour, ideal for long drives or deep dives',
+                'min_duration': 3600,  # 1 hour
+                'max_duration': None,
+                'sort_order': 'duration_desc',
+                'icon_name': 'ph-car',
+                'max_episodes': 1000
+            },
+            {
+                'name': 'Currently Listening',
+                'description': 'Episodes you\'ve started but haven\'t finished',
+                'min_duration': None,
+                'max_duration': None,
+                'sort_order': 'date_desc',
+                'include_unplayed': False,
+                'include_partially_played': True,
+                'include_played': False,
+                'icon_name': 'ph-play'
+            },
+            {
+                'name': 'Fresh Releases',
+                'description': 'Latest episodes from the last 24 hours',
+                'min_duration': None,
+                'max_duration': None,
+                'sort_order': 'date_desc',
+                'include_unplayed': True,
+                'include_partially_played': False,
+                'include_played': False,
+                'time_filter_hours': 24,
+                'icon_name': 'ph-sparkle'
+            },
+            {
+                'name': 'Weekend Marathon',
+                'description': 'Longer episodes (30+ minutes) perfect for weekend listening',
+                'min_duration': 1800,  # 30 minutes
+                'max_duration': None,
+                'sort_order': 'duration_desc',
+                'group_by_podcast': True,
+                'icon_name': 'ph-couch',
+                'max_episodes': 1000
+            },
+            {
+                'name': 'Commuter Mix',
+                'description': 'Perfect-length episodes (15-45 minutes) for your daily commute',
+                'min_duration': 900,  # 15 minutes
+                'max_duration': 2700,  # 45 minutes
+                'sort_order': 'date_desc',
+                'icon_name': 'ph-car-simple',
+                'max_episodes': 1000
+            }
+        ]
+
+        # Get all existing users (excluding background user if present)
+        if db_type == "postgresql":
+            cursor.execute('SELECT userid FROM "Users" WHERE userid > 1')
+        else:
+            cursor.execute('SELECT UserID FROM Users WHERE UserID > 1')
+
+        users = cursor.fetchall()
+        logger.info(f"Found {len(users)} users to create default playlists for")
+
+        # Create default playlists for each user
+        for user_row in users:
+            # Row may be a plain tuple or a dict-style row depending on the
+            # cursor factory in use.
+            user_id = user_row[0] if isinstance(user_row, tuple) else user_row['userid' if db_type == "postgresql" else 'UserID']
+            logger.info(f"Creating default playlists for user {user_id}")
+
+            for playlist in default_playlists:
+                try:
+                    # Check if this playlist already exists for this user
+                    if db_type == "postgresql":
+                        cursor.execute("""
+                            SELECT COUNT(*)
+                            FROM "Playlists"
+                            WHERE userid = %s AND name = %s
+                        """, (user_id, playlist['name']))
+                    else:
+                        cursor.execute("""
+                            SELECT COUNT(*)
+                            FROM Playlists
+                            WHERE UserID = %s AND Name = %s
+                        """, (user_id, playlist['name']))
+
+                    if cursor.fetchone()[0] == 0:
+                        # Create the playlist for this user
+                        if db_type == "postgresql":
+                            cursor.execute("""
+                                INSERT INTO "Playlists" (
+                                    userid,
+                                    name,
+                                    description,
+                                    issystemplaylist,
+                                    minduration,
+                                    maxduration,
+                                    sortorder,
+                                    includeunplayed,
+                                    includepartiallyplayed,
+                                    includeplayed,
+                                    timefilterhours,
+                                    groupbypodcast,
+                                    maxepisodes,
+                                    iconname,
+                                    episodecount
+                                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+                            """, (
+                                user_id,
+                                playlist['name'],
+                                playlist['description'],
+                                False,  # No longer system playlists
+                                playlist.get('min_duration'),
+                                playlist.get('max_duration'),
+                                playlist['sort_order'],
+                                playlist.get('include_unplayed', True),
+                                playlist.get('include_partially_played', True),
+                                playlist.get('include_played', True),
+                                playlist.get('time_filter_hours'),
+                                playlist.get('group_by_podcast', False),
+                                playlist.get('max_episodes'),
+                                playlist['icon_name'],
+                                0  # Will be updated by scheduled count update
+                            ))
+                        else:
+                            cursor.execute("""
+                                INSERT INTO Playlists (
+                                    UserID,
+                                    Name,
+                                    Description,
+                                    IsSystemPlaylist,
+                                    MinDuration,
+                                    MaxDuration,
+                                    SortOrder,
+                                    IncludeUnplayed,
+                                    IncludePartiallyPlayed,
+                                    IncludePlayed,
+                                    TimeFilterHours,
+                                    GroupByPodcast,
+                                    MaxEpisodes,
+                                    IconName,
+                                    EpisodeCount
+                                ) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
+                            """, (
+                                user_id,
+                                playlist['name'],
+                                playlist['description'],
+                                False,  # No longer system playlists
+                                playlist.get('min_duration'),
+                                playlist.get('max_duration'),
+                                playlist['sort_order'],
+                                playlist.get('include_unplayed', True),
+                                playlist.get('include_partially_played', True),
+                                playlist.get('include_played', True),
+                                playlist.get('time_filter_hours'),
+                                playlist.get('group_by_podcast', False),
+                                playlist.get('max_episodes'),
+                                playlist['icon_name'],
+                                0  # Will be updated by scheduled count update
+                            ))
+
+                        logger.info(f"Created playlist '{playlist['name']}' for user {user_id}")
+                    else:
+                        logger.info(f"Playlist '{playlist['name']}' already exists for user {user_id}")
+
+                except Exception as e:
+                    logger.error(f"Failed to create playlist '{playlist['name']}' for user {user_id}: {e}")
+                    # Continue with other playlists even if one fails.
+                    # NOTE(review): on PostgreSQL a failed statement aborts the
+                    # current transaction, so subsequent executes (and the final
+                    # commit) would also fail unless autocommit is enabled or a
+                    # rollback happens -- confirm the connection's mode.
+
+        # Commit all changes
+        conn.commit()
+        logger.info("Successfully created default playlists for all existing users")
+
+    except Exception as e:
+        logger.error(f"Error in user default playlists migration: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+# ============================================================================
+# GPODDER SYNC MIGRATIONS
+# These migrations match the gpodder-api service migrations from Go code
+# ============================================================================
+
+@register_migration("100", "gpodder_initial_schema", "Create initial gpodder sync tables")
+def migration_100_gpodder_initial_schema(conn, db_type: str):
+    """Create initial gpodder sync schema - matches Go migration version 1.
+
+    Creates eight tables (migrations bookkeeping, per-device state,
+    subscription changelog, episode actions, podcast lists + entries, device
+    pairs, and scoped settings) plus supporting indexes, in the engine's
+    naming/identifier style.  All tables use CREATE TABLE IF NOT EXISTS;
+    several reference the pre-existing Users and GpodderDevices tables with
+    ON DELETE CASCADE.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL DDL; any other value
+            gets the MySQL DDL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting gpodder migration 100: Initial schema creation")
+
+        if db_type == 'postgresql':
+            # Create all gpodder sync tables for PostgreSQL
+            tables_sql = [
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncMigrations" (
+                    Version INT PRIMARY KEY,
+                    Description TEXT NOT NULL,
+                    AppliedAt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncDeviceState" (
+                    DeviceStateID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    SubscriptionCount INT DEFAULT 0,
+                    LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncSubscriptions" (
+                    SubscriptionID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    PodcastURL TEXT NOT NULL,
+                    Action VARCHAR(10) NOT NULL,
+                    Timestamp BIGINT NOT NULL,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncEpisodeActions" (
+                    ActionID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT,
+                    PodcastURL TEXT NOT NULL,
+                    EpisodeURL TEXT NOT NULL,
+                    Action VARCHAR(20) NOT NULL,
+                    Timestamp BIGINT NOT NULL,
+                    Started INT,
+                    Position INT,
+                    Total INT,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncPodcastLists" (
+                    ListID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    Name VARCHAR(255) NOT NULL,
+                    Title VARCHAR(255) NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    UNIQUE(UserID, Name)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncPodcastListEntries" (
+                    EntryID SERIAL PRIMARY KEY,
+                    ListID INT NOT NULL,
+                    PodcastURL TEXT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (ListID) REFERENCES "GpodderSyncPodcastLists"(ListID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncDevicePairs" (
+                    PairID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID1 INT NOT NULL,
+                    DeviceID2 INT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID1) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID2) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID1, DeviceID2)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncSettings" (
+                    SettingID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    Scope VARCHAR(20) NOT NULL,
+                    DeviceID INT,
+                    PodcastURL TEXT,
+                    EpisodeURL TEXT,
+                    SettingKey VARCHAR(255) NOT NULL,
+                    SettingValue TEXT,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE
+                )
+                '''
+            ]
+
+            # Create indexes (idempotent via IF NOT EXISTS on PostgreSQL)
+            indexes_sql = [
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subscriptions_userid ON "GpodderSyncSubscriptions"(UserID)',
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subscriptions_deviceid ON "GpodderSyncSubscriptions"(DeviceID)',
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_episode_actions_userid ON "GpodderSyncEpisodeActions"(UserID)',
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sync_podcast_lists_userid ON "GpodderSyncPodcastLists"(UserID)'
+            ]
+
+        else:  # mysql
+            # Create all gpodder sync tables for MySQL
+            tables_sql = [
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncMigrations (
+                    Version INT PRIMARY KEY,
+                    Description TEXT NOT NULL,
+                    AppliedAt TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncDeviceState (
+                    DeviceStateID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    SubscriptionCount INT DEFAULT 0,
+                    LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncSubscriptions (
+                    SubscriptionID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    PodcastURL TEXT NOT NULL,
+                    Action VARCHAR(10) NOT NULL,
+                    Timestamp BIGINT NOT NULL,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncEpisodeActions (
+                    ActionID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT,
+                    PodcastURL TEXT NOT NULL,
+                    EpisodeURL TEXT NOT NULL,
+                    Action VARCHAR(20) NOT NULL,
+                    Timestamp BIGINT NOT NULL,
+                    Started INT,
+                    Position INT,
+                    Total INT,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncPodcastLists (
+                    ListID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    Name VARCHAR(255) NOT NULL,
+                    Title VARCHAR(255) NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    UNIQUE(UserID, Name)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncPodcastListEntries (
+                    EntryID INT AUTO_INCREMENT PRIMARY KEY,
+                    ListID INT NOT NULL,
+                    PodcastURL TEXT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (ListID) REFERENCES GpodderSyncPodcastLists(ListID) ON DELETE CASCADE
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncDevicePairs (
+                    PairID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID1 INT NOT NULL,
+                    DeviceID2 INT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID1) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID2) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID1, DeviceID2)
+                )
+                ''',
+                '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncSettings (
+                    SettingID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    Scope VARCHAR(20) NOT NULL,
+                    DeviceID INT,
+                    PodcastURL TEXT,
+                    EpisodeURL TEXT,
+                    SettingKey VARCHAR(255) NOT NULL,
+                    SettingValue TEXT,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    LastUpdated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE
+                )
+                '''
+            ]
+
+            # Create indexes.
+            # NOTE(review): MySQL's CREATE INDEX has no IF NOT EXISTS, so these
+            # error on re-run -- presumably safe_execute_sql tolerates that.
+            indexes_sql = [
+                'CREATE INDEX idx_gpodder_sync_subscriptions_userid ON GpodderSyncSubscriptions(UserID)',
+                'CREATE INDEX idx_gpodder_sync_subscriptions_deviceid ON GpodderSyncSubscriptions(DeviceID)',
+                'CREATE INDEX idx_gpodder_sync_episode_actions_userid ON GpodderSyncEpisodeActions(UserID)',
+                'CREATE INDEX idx_gpodder_sync_podcast_lists_userid ON GpodderSyncPodcastLists(UserID)'
+            ]
+
+        # Execute table creation
+        for sql in tables_sql:
+            safe_execute_sql(cursor, sql, conn=conn)
+
+        # Execute index creation
+        for sql in indexes_sql:
+            safe_execute_sql(cursor, sql, conn=conn)
+
+        logger.info("Created gpodder sync initial schema successfully")
+
+    except Exception as e:
+        logger.error(f"Error in gpodder migration 100: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("101", "gpodder_add_api_version", "Add API version column to GpodderSyncSettings")
+def migration_101_gpodder_add_api_version(conn, db_type: str):
+    """Add API version column - matches Go migration version 2.
+
+    Adds APIVersion (VARCHAR(10), default '2.0') to GpodderSyncSettings.
+    PostgreSQL uses the idempotent ADD COLUMN IF NOT EXISTS; MySQL is guarded
+    by an information_schema existence check.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting gpodder migration 101: Add API version column")
+
+        if db_type == 'postgresql':
+            safe_execute_sql(cursor, '''
+                ALTER TABLE "GpodderSyncSettings"
+                ADD COLUMN IF NOT EXISTS APIVersion VARCHAR(10) DEFAULT '2.0'
+            ''', conn=conn)
+        else:  # mysql
+            # Check if column exists first, then add if it doesn't
+            cursor.execute("""
+                SELECT COUNT(*) FROM INFORMATION_SCHEMA.COLUMNS
+                WHERE TABLE_NAME = 'GpodderSyncSettings'
+                AND COLUMN_NAME = 'APIVersion'
+                AND TABLE_SCHEMA = DATABASE()
+            """)
+
+            if cursor.fetchone()[0] == 0:
+                safe_execute_sql(cursor, '''
+                    ALTER TABLE GpodderSyncSettings
+                    ADD COLUMN APIVersion VARCHAR(10) DEFAULT '2.0'
+                ''', conn=conn)
+                logger.info("Added APIVersion column to GpodderSyncSettings")
+            else:
+                logger.info("APIVersion column already exists in GpodderSyncSettings")
+
+        logger.info("Gpodder API version migration completed successfully")
+
+    except Exception as e:
+        logger.error(f"Error in gpodder migration 101: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("102", "gpodder_create_sessions", "Create GpodderSessions table for API sessions")
+def migration_102_gpodder_create_sessions(conn, db_type: str):
+    """Create GpodderSessions table - matches Go migration version 3.
+
+    Stores API session tokens per user with expiry/last-active timestamps.
+    NOTE(review): only the PostgreSQL schema enforces UNIQUE(SessionToken)
+    and indexes the token -- MySQL cannot index/unique a bare TEXT column
+    without a prefix length, which presumably is why the MySQL branch omits
+    both.  Confirm callers don't rely on DB-level token uniqueness on MySQL.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting gpodder migration 102: Create GpodderSessions table")
+
+        if db_type == 'postgresql':
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS "GpodderSessions" (
+                    SessionID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    SessionToken TEXT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    ExpiresAt TIMESTAMP NOT NULL,
+                    LastActive TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    UserAgent TEXT,
+                    ClientIP TEXT,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    UNIQUE(SessionToken)
+                )
+            ''', conn=conn)
+
+            # Create indexes
+            indexes_sql = [
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_token ON "GpodderSessions"(SessionToken)',
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_userid ON "GpodderSessions"(UserID)',
+                'CREATE INDEX IF NOT EXISTS idx_gpodder_sessions_expires ON "GpodderSessions"(ExpiresAt)'
+            ]
+        else:  # mysql
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS GpodderSessions (
+                    SessionID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    SessionToken TEXT NOT NULL,
+                    CreatedAt TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    ExpiresAt TIMESTAMP NOT NULL,
+                    LastActive TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    UserAgent TEXT,
+                    ClientIP TEXT,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE
+                )
+            ''', conn=conn)
+
+            # Create indexes
+            indexes_sql = [
+                'CREATE INDEX idx_gpodder_sessions_userid ON GpodderSessions(UserID)',
+                'CREATE INDEX idx_gpodder_sessions_expires ON GpodderSessions(ExpiresAt)'
+            ]
+
+        # Execute index creation
+        for sql in indexes_sql:
+            safe_execute_sql(cursor, sql, conn=conn)
+
+        logger.info("Created GpodderSessions table successfully")
+
+    except Exception as e:
+        logger.error(f"Error in gpodder migration 102: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
+@register_migration("103", "gpodder_sync_state_table", "Add sync state table for tracking device sync status")
+def migration_103_gpodder_sync_state_table(conn, db_type: str):
+    """Create GpodderSyncState table - matches Go migration version 4.
+
+    Tracks the last-synced timestamp per (user, device) pair, with a UNIQUE
+    constraint on the pair and a composite index for lookups.  Rows
+    cascade-delete with either the user or the device.
+
+    Args:
+        conn: Open DB-API connection; a cursor is created and closed here.
+        db_type: "postgresql" selects the PostgreSQL branch; any other value
+            is treated as MySQL.
+
+    Raises:
+        Exception: Re-raised after logging on failure.
+    """
+    cursor = conn.cursor()
+
+    try:
+        logger.info("Starting gpodder migration 103: Add sync state table")
+
+        if db_type == 'postgresql':
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS "GpodderSyncState" (
+                    SyncStateID SERIAL PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    LastTimestamp BIGINT DEFAULT 0,
+                    LastSync TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES "Users"(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES "GpodderDevices"(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID)
+                )
+            ''', conn=conn)
+
+            safe_execute_sql(cursor, '''
+                CREATE INDEX IF NOT EXISTS idx_gpodder_syncstate_userid_deviceid ON "GpodderSyncState"(UserID, DeviceID)
+            ''', conn=conn)
+        else:  # mysql
+            safe_execute_sql(cursor, '''
+                CREATE TABLE IF NOT EXISTS GpodderSyncState (
+                    SyncStateID INT AUTO_INCREMENT PRIMARY KEY,
+                    UserID INT NOT NULL,
+                    DeviceID INT NOT NULL,
+                    LastTimestamp BIGINT DEFAULT 0,
+                    LastSync TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
+                    FOREIGN KEY (UserID) REFERENCES Users(UserID) ON DELETE CASCADE,
+                    FOREIGN KEY (DeviceID) REFERENCES GpodderDevices(DeviceID) ON DELETE CASCADE,
+                    UNIQUE(UserID, DeviceID)
+                )
+            ''', conn=conn)
+
+            safe_execute_sql(cursor, '''
+                CREATE INDEX idx_gpodder_syncstate_userid_deviceid ON GpodderSyncState(UserID, DeviceID)
+            ''', conn=conn)
+
+        logger.info("Created GpodderSyncState table successfully")
+
+    except Exception as e:
+        logger.error(f"Error in gpodder migration 103: {e}")
+        raise
+    finally:
+        cursor.close()
+
+
@register_migration("104", "create_people_episodes_backup", "Skip PeopleEpisodes_backup - varies by installation")
def migration_104_create_people_episodes_backup(conn, db_type: str):
    """Skip PeopleEpisodes_backup table - this varies by installation and shouldn't be validated

    Intentional no-op: the migration version is still registered (and will
    be recorded as applied) so the version sequence stays contiguous, but
    no schema change is performed.
    """
    logger.info("Skipping migration 104: PeopleEpisodes_backup table varies by installation")
    # This migration is a no-op since backup tables vary by installation
    # and shouldn't be part of the expected schema
+
+
@register_migration("105", "optimize_episode_actions_performance", "Add indexes and optimize episode actions queries")
def migration_105_optimize_episode_actions_performance(conn, db_type: str):
    """Add critical indexes for episode actions performance and create optimized views

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if index creation fails.
    """
    cursor = conn.cursor()

    try:
        logger.info("Adding performance indexes for episode actions...")

        if db_type == 'postgresql':
            # Critical indexes for episode actions performance
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_episode_actions_user_timestamp
                ON "GpodderSyncEpisodeActions"(UserID, Timestamp DESC)
            ''', conn=conn)

            # Partial index: rows with NULL DeviceID are excluded to keep it small.
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_episode_actions_device_timestamp
                ON "GpodderSyncEpisodeActions"(DeviceID, Timestamp DESC)
                WHERE DeviceID IS NOT NULL
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_episode_actions_podcast_episode
                ON "GpodderSyncEpisodeActions"(UserID, PodcastURL, EpisodeURL, Timestamp DESC)
            ''', conn=conn)

            # Partial index matching "changes since timestamp" queries.
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_episode_actions_since_filter
                ON "GpodderSyncEpisodeActions"(UserID, Timestamp DESC, DeviceID)
                WHERE Timestamp > 0
            ''', conn=conn)

            # Optimize devices table lookups
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_gpodder_devices_user_name
                ON "GpodderDevices"(UserID, DeviceName)
                WHERE IsActive = true
            ''', conn=conn)

        else:  # mysql/mariadb
            # MySQL lacks partial indexes and CREATE INDEX IF NOT EXISTS, so
            # these are plain full indexes; safe_execute_sql is relied on to
            # tolerate duplicate-index errors on re-run.
            # Critical indexes for episode actions performance
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_episode_actions_user_timestamp
                ON GpodderSyncEpisodeActions(UserID, Timestamp DESC)
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX idx_episode_actions_device_timestamp
                ON GpodderSyncEpisodeActions(DeviceID, Timestamp DESC)
            ''', conn=conn)

            # URL columns are TEXT, so MySQL requires a prefix length (255).
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_episode_actions_podcast_episode
                ON GpodderSyncEpisodeActions(UserID, PodcastURL(255), EpisodeURL(255), Timestamp DESC)
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX idx_episode_actions_since_filter
                ON GpodderSyncEpisodeActions(UserID, Timestamp DESC, DeviceID)
            ''', conn=conn)

            # Optimize devices table lookups
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_gpodder_devices_user_name
                ON GpodderDevices(UserID, DeviceName)
            ''', conn=conn)

        logger.info("Successfully added episode actions performance indexes")

    except Exception as e:
        logger.error(f"Error in gpodder migration 105: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("106", "optimize_subscription_sync_performance", "Add missing indexes for subscription sync queries", requires=["103"])
def migration_106_optimize_subscription_sync_performance(conn, db_type: str):
    """Add critical indexes for subscription sync performance to prevent AntennaPod timeouts

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if index creation fails.
    """
    cursor = conn.cursor()

    try:
        logger.info("Adding performance indexes for subscription sync...")

        if db_type == 'postgresql':
            # Critical indexes for subscription sync performance
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_user_device_timestamp
                ON "GpodderSyncSubscriptions"(UserID, DeviceID, Timestamp DESC)
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_user_action_timestamp
                ON "GpodderSyncSubscriptions"(UserID, Action, Timestamp DESC)
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_podcast_url_user
                ON "GpodderSyncSubscriptions"(UserID, PodcastURL, Timestamp DESC)
            ''', conn=conn)

            # Optimize subscription change queries with compound index
            safe_execute_sql(cursor, '''
                CREATE INDEX IF NOT EXISTS idx_gpodder_sync_subs_complex_query
                ON "GpodderSyncSubscriptions"(UserID, DeviceID, Action, Timestamp DESC, PodcastURL)
            ''', conn=conn)

        else:  # mysql/mariadb
            # No CREATE INDEX IF NOT EXISTS on MySQL; safe_execute_sql is
            # relied on to tolerate duplicate-index errors on re-run.
            # Critical indexes for subscription sync performance
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_gpodder_sync_subs_user_device_timestamp
                ON GpodderSyncSubscriptions(UserID, DeviceID, Timestamp DESC)
            ''', conn=conn)

            safe_execute_sql(cursor, '''
                CREATE INDEX idx_gpodder_sync_subs_user_action_timestamp
                ON GpodderSyncSubscriptions(UserID, Action, Timestamp DESC)
            ''', conn=conn)

            # TEXT column requires a prefix length (255) for MySQL indexes.
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_gpodder_sync_subs_podcast_url_user
                ON GpodderSyncSubscriptions(UserID, PodcastURL(255), Timestamp DESC)
            ''', conn=conn)

            # Optimize subscription change queries with compound index
            safe_execute_sql(cursor, '''
                CREATE INDEX idx_gpodder_sync_subs_complex_query
                ON GpodderSyncSubscriptions(UserID, DeviceID, Action, Timestamp DESC, PodcastURL(255))
            ''', conn=conn)

        logger.info("Successfully added subscription sync performance indexes")

    except Exception as e:
        logger.error(f"Error in gpodder migration 106: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("033", "add_http_notification_columns", "Add generic HTTP notification columns to UserNotificationSettings table", requires=["011"])
def migration_033_add_http_notification_columns(conn, db_type: str):
    """Add generic HTTP notification columns for platforms like Telegram

    Adds HttpUrl, HttpToken and HttpMethod to UserNotificationSettings,
    checking information_schema first so the migration is idempotent.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if any statement fails.
    """
    cursor = conn.cursor()

    try:
        if db_type == "postgresql":
            # Check if columns already exist (PostgreSQL - lowercase column names,
            # because unquoted identifiers in ADD COLUMN fold to lowercase)
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'UserNotificationSettings'
                AND column_name IN ('httpurl', 'httptoken', 'httpmethod')
            """)
            existing_columns = [row[0] for row in cursor.fetchall()]

            if 'httpurl' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "UserNotificationSettings"
                    ADD COLUMN HttpUrl VARCHAR(500)
                """)
                logger.info("Added HttpUrl column to UserNotificationSettings table (PostgreSQL)")

            if 'httptoken' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "UserNotificationSettings"
                    ADD COLUMN HttpToken VARCHAR(255)
                """)
                logger.info("Added HttpToken column to UserNotificationSettings table (PostgreSQL)")

            if 'httpmethod' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "UserNotificationSettings"
                    ADD COLUMN HttpMethod VARCHAR(10) DEFAULT 'POST'
                """)
                logger.info("Added HttpMethod column to UserNotificationSettings table (PostgreSQL)")

        else:
            # Check if columns already exist (MySQL)
            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'UserNotificationSettings'
                AND column_name = 'HttpUrl'
                AND table_schema = DATABASE()
            """)
            url_exists = cursor.fetchone()[0] > 0

            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'UserNotificationSettings'
                AND column_name = 'HttpToken'
                AND table_schema = DATABASE()
            """)
            token_exists = cursor.fetchone()[0] > 0

            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'UserNotificationSettings'
                AND column_name = 'HttpMethod'
                AND table_schema = DATABASE()
            """)
            method_exists = cursor.fetchone()[0] > 0

            if not url_exists:
                cursor.execute("""
                    ALTER TABLE UserNotificationSettings
                    ADD COLUMN HttpUrl VARCHAR(500)
                """)
                logger.info("Added HttpUrl column to UserNotificationSettings table (MySQL)")

            if not token_exists:
                cursor.execute("""
                    ALTER TABLE UserNotificationSettings
                    ADD COLUMN HttpToken VARCHAR(255)
                """)
                logger.info("Added HttpToken column to UserNotificationSettings table (MySQL)")

            if not method_exists:
                cursor.execute("""
                    ALTER TABLE UserNotificationSettings
                    ADD COLUMN HttpMethod VARCHAR(10) DEFAULT 'POST'
                """)
                logger.info("Added HttpMethod column to UserNotificationSettings table (MySQL)")

        logger.info("HTTP notification columns migration completed successfully")

    except Exception as e:
        # Log with migration context before propagating, matching the
        # error-handling style of the other migrations in this file.
        logger.error(f"Error in migration 033: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("034", "add_podcast_merge_columns", "Add podcast merge columns to support merging podcasts", requires=["033"])
def migration_034_add_podcast_merge_columns(conn, db_type: str):
    """Add DisplayPodcast, RefreshPodcast, and MergedPodcastIDs columns to Podcasts table

    Columns are added only when missing (checked via information_schema),
    then an index on DisplayPodcast is created for lookup performance.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if any statement fails.
    """
    cursor = conn.cursor()

    try:
        if db_type == "postgresql":
            # Check if columns already exist (PostgreSQL - unquoted ADD COLUMN
            # identifiers fold to lowercase, so compare lowercase names)
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'Podcasts'
                AND column_name IN ('displaypodcast', 'refreshpodcast', 'mergedpodcastids')
            """)
            existing_columns = [row[0] for row in cursor.fetchall()]

            if 'displaypodcast' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "Podcasts"
                    ADD COLUMN DisplayPodcast BOOLEAN DEFAULT TRUE
                """)
                logger.info("Added DisplayPodcast column to Podcasts table (PostgreSQL)")

            if 'refreshpodcast' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "Podcasts"
                    ADD COLUMN RefreshPodcast BOOLEAN DEFAULT TRUE
                """)
                logger.info("Added RefreshPodcast column to Podcasts table (PostgreSQL)")

            if 'mergedpodcastids' not in existing_columns:
                cursor.execute("""
                    ALTER TABLE "Podcasts"
                    ADD COLUMN MergedPodcastIDs TEXT
                """)
                logger.info("Added MergedPodcastIDs column to Podcasts table (PostgreSQL)")

        else:  # MySQL
            # Check if columns already exist (MySQL)
            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'Podcasts'
                AND column_name = 'DisplayPodcast'
                AND table_schema = DATABASE()
            """)
            display_exists = cursor.fetchone()[0] > 0

            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'Podcasts'
                AND column_name = 'RefreshPodcast'
                AND table_schema = DATABASE()
            """)
            refresh_exists = cursor.fetchone()[0] > 0

            cursor.execute("""
                SELECT COUNT(*)
                FROM information_schema.columns
                WHERE table_name = 'Podcasts'
                AND column_name = 'MergedPodcastIDs'
                AND table_schema = DATABASE()
            """)
            merged_exists = cursor.fetchone()[0] > 0

            if not display_exists:
                # TINYINT(1) is the MySQL equivalent of BOOLEAN DEFAULT TRUE
                cursor.execute("""
                    ALTER TABLE Podcasts
                    ADD COLUMN DisplayPodcast TINYINT(1) DEFAULT 1
                """)
                logger.info("Added DisplayPodcast column to Podcasts table (MySQL)")

            if not refresh_exists:
                cursor.execute("""
                    ALTER TABLE Podcasts
                    ADD COLUMN RefreshPodcast TINYINT(1) DEFAULT 1
                """)
                logger.info("Added RefreshPodcast column to Podcasts table (MySQL)")

            if not merged_exists:
                cursor.execute("""
                    ALTER TABLE Podcasts
                    ADD COLUMN MergedPodcastIDs TEXT
                """)
                logger.info("Added MergedPodcastIDs column to Podcasts table (MySQL)")

        # Add index on DisplayPodcast for performance; quote style differs per engine
        table_quote = "`" if db_type != "postgresql" else '"'
        safe_add_index(cursor, db_type,
            f'CREATE INDEX idx_podcasts_displaypodcast ON {table_quote}Podcasts{table_quote} (DisplayPodcast)',
            'idx_podcasts_displaypodcast')

        logger.info("Podcast merge columns migration completed successfully")

    except Exception as e:
        # Log with migration context before propagating, matching the
        # error-handling style of the other migrations in this file.
        logger.error(f"Error in migration 034: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("035", "add_podcast_cover_preference_columns", "Add podcast cover preference columns to Users and Podcasts tables", requires=["034"])
def migration_035_add_podcast_cover_preference_columns(conn, db_type: str):
    """Add podcast cover preference columns to Users and Podcasts tables for existing installations

    NOTE: this migration is deliberately best-effort — each step catches and
    logs its own exception and continues instead of raising, unlike the
    fail-fast migrations elsewhere in this file.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).
    """
    cursor = conn.cursor()

    try:
        # Add UsePodcastCovers to Users table if it doesn't exist
        try:
            if db_type == "postgresql":
                # PostgreSQL supports ADD COLUMN IF NOT EXISTS, so no
                # pre-check is needed here.
                cursor.execute("""
                    ALTER TABLE "Users"
                    ADD COLUMN IF NOT EXISTS UsePodcastCovers BOOLEAN DEFAULT FALSE
                """)
            else:  # MySQL/MariaDB
                # Check if column exists first
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = DATABASE()
                    AND TABLE_NAME = 'Users'
                    AND COLUMN_NAME = 'UsePodcastCovers'
                """)
                if cursor.fetchone()[0] == 0:
                    cursor.execute("""
                        ALTER TABLE Users
                        ADD COLUMN UsePodcastCovers TINYINT(1) DEFAULT 0
                    """)
                    logger.info("Added UsePodcastCovers column to Users table")
                else:
                    logger.info("UsePodcastCovers column already exists in Users table")

        except Exception as e:
            # Best-effort: log and keep going so the Podcasts step still runs.
            logger.error(f"Error adding UsePodcastCovers to Users table: {e}")

        # Add UsePodcastCovers columns to Podcasts table if they don't exist
        try:
            if db_type == "postgresql":
                cursor.execute("""
                    ALTER TABLE "Podcasts"
                    ADD COLUMN IF NOT EXISTS UsePodcastCovers BOOLEAN DEFAULT FALSE,
                    ADD COLUMN IF NOT EXISTS UsePodcastCoversCustomized BOOLEAN DEFAULT FALSE
                """)
            else:  # MySQL/MariaDB
                # Check if UsePodcastCovers column exists
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = DATABASE()
                    AND TABLE_NAME = 'Podcasts'
                    AND COLUMN_NAME = 'UsePodcastCovers'
                """)
                if cursor.fetchone()[0] == 0:
                    cursor.execute("""
                        ALTER TABLE Podcasts
                        ADD COLUMN UsePodcastCovers TINYINT(1) DEFAULT 0
                    """)
                    logger.info("Added UsePodcastCovers column to Podcasts table")
                else:
                    logger.info("UsePodcastCovers column already exists in Podcasts table")

                # Check if UsePodcastCoversCustomized column exists
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_SCHEMA = DATABASE()
                    AND TABLE_NAME = 'Podcasts'
                    AND COLUMN_NAME = 'UsePodcastCoversCustomized'
                """)
                if cursor.fetchone()[0] == 0:
                    cursor.execute("""
                        ALTER TABLE Podcasts
                        ADD COLUMN UsePodcastCoversCustomized TINYINT(1) DEFAULT 0
                    """)
                    logger.info("Added UsePodcastCoversCustomized column to Podcasts table")
                else:
                    logger.info("UsePodcastCoversCustomized column already exists in Podcasts table")

        except Exception as e:
            logger.error(f"Error adding UsePodcastCovers columns to Podcasts table: {e}")

        # NOTE(review): no explicit conn.commit() here — presumably the
        # migration runner commits after each migration; confirm against it.
        logger.info("Podcast cover preference columns migration completed successfully")

    finally:
        cursor.close()
+
+
@register_migration("036", "add_episodecount_column_to_playlists", "Add episodecount column to Playlists table for tracking episode counts", requires=["010"])
def migration_036_add_episodecount_column(conn, db_type: str):
    """Add episodecount column to Playlists table if it doesn't exist

    This migration was needed because migration 032 was applied to existing databases
    before the episodecount column addition was added to it. Since migration 032 is
    already marked as applied in those databases, the column was never created.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if any statement fails.
    """
    cursor = conn.cursor()

    try:
        logger.info("Checking for episodecount column in Playlists table")

        if db_type == "postgresql":
            # Check if episodecount column exists (PostgreSQL folds the
            # unquoted column name to lowercase)
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'Playlists'
                AND column_name = 'episodecount'
            """)
            column_exists = len(cursor.fetchall()) > 0

            if not column_exists:
                cursor.execute("""
                    ALTER TABLE "Playlists"
                    ADD COLUMN episodecount INTEGER DEFAULT 0
                """)
                logger.info("Added episodecount column to Playlists table (PostgreSQL)")
            else:
                logger.info("episodecount column already exists in Playlists table (PostgreSQL)")
        else:
            # Check if episodecount column exists (MySQL/MariaDB — CamelCase
            # name here, unlike the lowercase PostgreSQL variant above)
            cursor.execute("""
                SELECT COUNT(*)
                FROM INFORMATION_SCHEMA.COLUMNS
                WHERE TABLE_NAME = 'Playlists'
                AND COLUMN_NAME = 'EpisodeCount'
                AND TABLE_SCHEMA = DATABASE()
            """)
            column_exists = cursor.fetchone()[0] > 0

            if not column_exists:
                cursor.execute("""
                    ALTER TABLE Playlists
                    ADD COLUMN EpisodeCount INT DEFAULT 0
                """)
                logger.info("Added EpisodeCount column to Playlists table (MySQL/MariaDB)")
            else:
                logger.info("EpisodeCount column already exists in Playlists table (MySQL/MariaDB)")

        logger.info("episodecount column migration completed successfully")

    except Exception as e:
        logger.error(f"Error in migration 036: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("037", "fix_shared_episodes_schema", "Add missing SharedBy and SharedWith columns to SharedEpisodes table", requires=["009"])
def migration_037_fix_shared_episodes_schema(conn, db_type: str):
    """Add missing SharedBy and SharedWith columns to SharedEpisodes table

    Old schema had: EpisodeID, UrlKey, ExpirationDate
    New schema needs: EpisodeID, SharedBy, SharedWith, ShareCode, ExpirationDate

    Each column addition commits immediately (conn.commit()) so a partial
    run leaves completed steps in place; re-running skips existing columns.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if any statement fails.
    """
    cursor = conn.cursor()

    try:
        logger.info("Starting SharedEpisodes schema fix migration")

        if db_type == "postgresql":
            # Check if sharedby column exists
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'SharedEpisodes'
                AND column_name = 'sharedby'
            """)
            sharedby_exists = len(cursor.fetchall()) > 0

            if not sharedby_exists:
                logger.info("Adding sharedby column to SharedEpisodes table (PostgreSQL)")
                # NOT NULL requires a default for existing rows; DEFAULT 1
                # presumably attributes pre-existing shares to user 1 — confirm.
                cursor.execute("""
                    ALTER TABLE "SharedEpisodes"
                    ADD COLUMN sharedby INTEGER NOT NULL DEFAULT 1
                """)
                conn.commit()

            # Check if sharedwith column exists
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'SharedEpisodes'
                AND column_name = 'sharedwith'
            """)
            sharedwith_exists = len(cursor.fetchall()) > 0

            if not sharedwith_exists:
                logger.info("Adding sharedwith column to SharedEpisodes table (PostgreSQL)")
                cursor.execute("""
                    ALTER TABLE "SharedEpisodes"
                    ADD COLUMN sharedwith INTEGER
                """)
                conn.commit()

            # Check if sharecode column exists (might have been UrlKey)
            cursor.execute("""
                SELECT column_name FROM information_schema.columns
                WHERE table_name = 'SharedEpisodes'
                AND column_name = 'sharecode'
            """)
            sharecode_exists = len(cursor.fetchall()) > 0

            if not sharecode_exists:
                # Check if UrlKey exists (either original mixed case or folded)
                cursor.execute("""
                    SELECT column_name FROM information_schema.columns
                    WHERE table_name = 'SharedEpisodes'
                    AND column_name IN ('UrlKey', 'urlkey')
                """)
                urlkey_result = cursor.fetchall()

                if urlkey_result:
                    # Preserve existing share codes by renaming the old column.
                    urlkey_name = urlkey_result[0][0]
                    logger.info(f"Renaming {urlkey_name} to sharecode (PostgreSQL)")
                    cursor.execute(f"""
                        ALTER TABLE "SharedEpisodes"
                        RENAME COLUMN "{urlkey_name}" TO sharecode
                    """)
                else:
                    logger.info("Adding sharecode column to SharedEpisodes table (PostgreSQL)")
                    cursor.execute("""
                        ALTER TABLE "SharedEpisodes"
                        ADD COLUMN sharecode TEXT UNIQUE
                    """)
                conn.commit()

            logger.info("SharedEpisodes schema fix completed (PostgreSQL)")

        else:  # MySQL/MariaDB
            # Check if SharedBy column exists
            cursor.execute("""
                SELECT COUNT(*)
                FROM INFORMATION_SCHEMA.COLUMNS
                WHERE TABLE_NAME = 'SharedEpisodes'
                AND COLUMN_NAME = 'SharedBy'
                AND TABLE_SCHEMA = DATABASE()
            """)
            sharedby_exists = cursor.fetchone()[0] > 0

            if not sharedby_exists:
                logger.info("Adding SharedBy column to SharedEpisodes table (MySQL)")
                cursor.execute("""
                    ALTER TABLE SharedEpisodes
                    ADD COLUMN SharedBy INT NOT NULL DEFAULT 1
                """)
                conn.commit()

            # Check if SharedWith column exists
            cursor.execute("""
                SELECT COUNT(*)
                FROM INFORMATION_SCHEMA.COLUMNS
                WHERE TABLE_NAME = 'SharedEpisodes'
                AND COLUMN_NAME = 'SharedWith'
                AND TABLE_SCHEMA = DATABASE()
            """)
            sharedwith_exists = cursor.fetchone()[0] > 0

            if not sharedwith_exists:
                logger.info("Adding SharedWith column to SharedEpisodes table (MySQL)")
                cursor.execute("""
                    ALTER TABLE SharedEpisodes
                    ADD COLUMN SharedWith INT
                """)
                conn.commit()

            # Check if ShareCode column exists (might have been UrlKey)
            cursor.execute("""
                SELECT COUNT(*)
                FROM INFORMATION_SCHEMA.COLUMNS
                WHERE TABLE_NAME = 'SharedEpisodes'
                AND COLUMN_NAME = 'ShareCode'
                AND TABLE_SCHEMA = DATABASE()
            """)
            sharecode_exists = cursor.fetchone()[0] > 0

            if not sharecode_exists:
                # Check if UrlKey exists
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM INFORMATION_SCHEMA.COLUMNS
                    WHERE TABLE_NAME = 'SharedEpisodes'
                    AND COLUMN_NAME = 'UrlKey'
                    AND TABLE_SCHEMA = DATABASE()
                """)
                urlkey_exists = cursor.fetchone()[0] > 0

                if urlkey_exists:
                    # CHANGE COLUMN renames while keeping the data.
                    logger.info("Renaming UrlKey to ShareCode (MySQL)")
                    cursor.execute("""
                        ALTER TABLE SharedEpisodes
                        CHANGE COLUMN UrlKey ShareCode TEXT
                    """)
                else:
                    logger.info("Adding ShareCode column to SharedEpisodes table (MySQL)")
                    cursor.execute("""
                        ALTER TABLE SharedEpisodes
                        ADD COLUMN ShareCode TEXT
                    """)
                conn.commit()

            logger.info("SharedEpisodes schema fix completed (MySQL)")

        logger.info("SharedEpisodes schema fix migration completed successfully")

    except Exception as e:
        logger.error(f"Error in migration 037: {e}")
        raise
    finally:
        cursor.close()
+
+
@register_migration("107", "fix_gpodder_episode_actions_antennapod", "Fix existing GPodder episode actions to include Started and Total fields for AntennaPod compatibility", requires=["103"])
def migration_107_fix_gpodder_episode_actions(conn, db_type: str):
    """
    Fix existing GPodder episode actions to be compatible with AntennaPod.
    AntennaPod requires all play actions to have Started, Position, and Total fields.
    This migration adds those fields by joining with the Episodes table to get duration.

    Args:
        conn: Open database connection.
        db_type: 'postgresql' or MySQL/MariaDB (any other value).

    Raises:
        Exception: re-raised after logging if any statement fails.
    """
    cursor = conn.cursor()

    try:
        logger.info("Starting GPodder episode actions fix for AntennaPod compatibility...")

        if db_type == "postgresql":
            # First, count how many actions need fixing
            cursor.execute("""
                SELECT COUNT(*)
                FROM "GpodderSyncEpisodeActions"
                WHERE action = 'play'
                AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0)
            """)
            count_result = cursor.fetchone()
            actions_to_fix = count_result[0] if count_result else 0

            logger.info(f"Found {actions_to_fix} play actions that need fixing (PostgreSQL)")

            if actions_to_fix > 0:
                # Update from Episodes table join: use the known episode
                # duration as Total, and 0 as Started.
                logger.info("Updating episode actions with duration from Episodes table...")
                cursor.execute("""
                    UPDATE "GpodderSyncEpisodeActions" AS gsa
                    SET
                        started = 0,
                        total = e.episodeduration
                    FROM "Episodes" e
                    WHERE gsa.action = 'play'
                    AND gsa.episodeurl = e.episodeurl
                    AND e.episodeduration IS NOT NULL
                    AND e.episodeduration > 0
                    AND (gsa.started IS NULL OR gsa.total IS NULL OR gsa.started < 0 OR gsa.total <= 0)
                """)
                conn.commit()

                # Fallback: use Position as Total for episodes not in Episodes table
                logger.info("Updating remaining actions using Position as fallback for Total...")
                cursor.execute("""
                    UPDATE "GpodderSyncEpisodeActions"
                    SET
                        started = 0,
                        total = COALESCE(position, 1)
                    WHERE action = 'play'
                    AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0)
                    AND position IS NOT NULL
                    AND position > 0
                """)
                conn.commit()

                # Final cleanup: set minimal valid values for any remaining invalid actions
                logger.info("Final cleanup: setting minimal valid values for remaining invalid actions...")
                cursor.execute("""
                    UPDATE "GpodderSyncEpisodeActions"
                    SET
                        started = 0,
                        total = 1
                    WHERE action = 'play'
                    AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0)
                """)
                conn.commit()

                # Verify the fix.
                # NOTE(review): this check also counts position <= 0, which the
                # UPDATEs above never repair, so remaining_broken can include
                # rows outside actions_to_fix and the "Fixed N" arithmetic
                # below can under-count (or go negative) — confirm intent.
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM "GpodderSyncEpisodeActions"
                    WHERE action = 'play'
                    AND (started IS NULL OR total IS NULL OR started < 0 OR total <= 0 OR position <= 0)
                """)
                remaining_result = cursor.fetchone()
                remaining_broken = remaining_result[0] if remaining_result else 0

                logger.info(f"Fixed {actions_to_fix - remaining_broken} episode actions (PostgreSQL)")
                if remaining_broken > 0:
                    logger.warning(f"{remaining_broken} actions still have invalid fields - these may need manual review")
            else:
                logger.info("No actions need fixing (PostgreSQL)")

        else:  # MySQL/MariaDB
            # First, count how many actions need fixing
            cursor.execute("""
                SELECT COUNT(*)
                FROM GpodderSyncEpisodeActions
                WHERE Action = 'play'
                AND (Started IS NULL OR Total IS NULL OR Started < 0 OR Total <= 0)
            """)
            count_result = cursor.fetchone()
            actions_to_fix = count_result[0] if count_result else 0

            logger.info(f"Found {actions_to_fix} play actions that need fixing (MySQL)")

            if actions_to_fix > 0:
                # MySQL: single UPDATE with LEFT JOIN — COALESCE picks the
                # episode duration when available, else Position, else 1.
                logger.info("Updating episode actions with duration from Episodes table...")
                cursor.execute("""
                    UPDATE GpodderSyncEpisodeActions AS gsa
                    LEFT JOIN Episodes e ON gsa.EpisodeURL = e.EpisodeURL
                        AND e.EpisodeDuration IS NOT NULL
                        AND e.EpisodeDuration > 0
                    SET
                        gsa.Started = 0,
                        gsa.Total = COALESCE(e.EpisodeDuration, gsa.Position, 1)
                    WHERE gsa.Action = 'play'
                    AND (gsa.Started IS NULL OR gsa.Total IS NULL OR gsa.Started < 0 OR gsa.Total <= 0)
                """)
                conn.commit()

                # Verify the fix.
                # NOTE(review): same Position <= 0 caveat as the PostgreSQL
                # branch — the count below can exceed what was targeted.
                cursor.execute("""
                    SELECT COUNT(*)
                    FROM GpodderSyncEpisodeActions
                    WHERE Action = 'play'
                    AND (Started IS NULL OR Total IS NULL OR Started < 0 OR Total <= 0 OR Position <= 0)
                """)
                remaining_result = cursor.fetchone()
                remaining_broken = remaining_result[0] if remaining_result else 0

                logger.info(f"Fixed {actions_to_fix - remaining_broken} episode actions (MySQL)")
                if remaining_broken > 0:
                    logger.warning(f"{remaining_broken} actions still have invalid fields - these may need manual review")
            else:
                logger.info("No actions need fixing (MySQL)")

        logger.info("GPodder episode actions fix migration completed successfully")
        logger.info("AntennaPod should now be able to sync episode actions correctly")

    except Exception as e:
        logger.error(f"Error in migration 107: {e}")
        raise
    finally:
        cursor.close()
+
+
if __name__ == "__main__":
# Register all migrations and run them
register_all_migrations()
diff --git a/database_functions/migrations.py b/database_functions/migrations.py
index 0ad1dad5..6a08787f 100644
--- a/database_functions/migrations.py
+++ b/database_functions/migrations.py
@@ -24,10 +24,14 @@
POSTGRES_AVAILABLE = False
try:
- import mysql.connector
+ import mariadb as mysql_connector
MYSQL_AVAILABLE = True
except ImportError:
- MYSQL_AVAILABLE = False
+ try:
+ import mysql.connector
+ MYSQL_AVAILABLE = True
+ except ImportError:
+ MYSQL_AVAILABLE = False
logger = logging.getLogger(__name__)
@@ -78,8 +82,17 @@ def get_connection(self):
self._connection = psycopg.connect(**self.connection_params)
elif self.db_type == 'mysql':
if not MYSQL_AVAILABLE:
- raise ImportError("mysql-connector-python not available for MySQL connections")
- self._connection = mysql.connector.connect(**self.connection_params)
+ raise ImportError("MariaDB/MySQL connector not available for MySQL connections")
+ # Use MariaDB connector parameters
+ mysql_params = self.connection_params.copy()
+ # Convert mysql.connector parameter names to mariadb parameter names
+ if 'connection_timeout' in mysql_params:
+ mysql_params['connect_timeout'] = mysql_params.pop('connection_timeout')
+ if 'charset' in mysql_params:
+ mysql_params.pop('charset') # MariaDB connector doesn't use charset parameter
+ if 'collation' in mysql_params:
+ mysql_params.pop('collation') # MariaDB connector doesn't use collation parameter
+ self._connection = mysql_connector.connect(**mysql_params)
return self._connection
@@ -304,6 +317,60 @@ def detect_existing_schema(self) -> List[str]:
applied.append("004")
logger.info("Detected existing schema for migration 004")
+ # Check for gpodder tables - if ANY exist, ALL gpodder migrations are applied
+ # (since they were created by the Go gpodder-api service and haven't changed)
+ gpodder_indicator_tables = ['"GpodderSyncMigrations"', '"GpodderSyncDeviceState"',
+ '"GpodderSyncSubscriptions"', '"GpodderSyncSettings"',
+ '"GpodderSessions"', '"GpodderSyncState"']
+ gpodder_migration_versions = ["100", "101", "102", "103", "104"]
+
+ gpodder_tables_exist = False
+ for table in gpodder_indicator_tables:
+ table_name = table.strip('"')
+ if self.db_type == 'postgresql':
+ cursor.execute("""
+ SELECT EXISTS (
+ SELECT FROM information_schema.tables
+ WHERE table_schema = 'public' AND table_name = %s
+ )
+ """, (table_name,))
+ else: # mysql
+ cursor.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.tables
+ WHERE table_schema = DATABASE() AND table_name = %s
+ """, (table_name,))
+
+ if cursor.fetchone()[0]:
+ gpodder_tables_exist = True
+ break
+
+ if gpodder_tables_exist:
+ for version in gpodder_migration_versions:
+ if version not in applied:
+ applied.append(version)
+ logger.info(f"Detected existing gpodder tables, marking migration {version} as applied")
+
+ # Check for PeopleEpisodes_backup table separately (migration 104)
+ backup_table = "PeopleEpisodes_backup"
+ if self.db_type == 'postgresql':
+ cursor.execute("""
+ SELECT EXISTS (
+ SELECT FROM information_schema.tables
+ WHERE table_schema = 'public' AND table_name = %s
+ )
+ """, (backup_table,))
+ else: # mysql
+ cursor.execute("""
+ SELECT COUNT(*)
+ FROM information_schema.tables
+ WHERE table_schema = DATABASE() AND table_name = %s
+ """, (backup_table,))
+
+ if cursor.fetchone()[0] and "104" not in applied:
+ applied.append("104")
+ logger.info("Detected existing PeopleEpisodes_backup table, marking migration 104 as applied")
+
return applied
except Exception as e:
diff --git a/database_functions/mp3_metadata.py b/database_functions/mp3_metadata.py
deleted file mode 100644
index eb48725f..00000000
--- a/database_functions/mp3_metadata.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from mutagen.easyid3 import EasyID3
-from mutagen.id3 import ID3, APIC, ID3NoHeaderError
-from mutagen.mp3 import MP3
-import mutagen
-import requests
-
-def add_podcast_metadata(file_path, metadata):
- """
- Add metadata to a downloaded podcast MP3 file.
-
- Args:
- file_path (str): Path to the MP3 file
- metadata (dict): Dictionary containing metadata with keys:
- - title: Episode title
- - artist: Podcast author/creator
- - album: Podcast name
- - date: Publication date
- - description: Episode description
- - artwork_url: URL to episode/podcast artwork
- """
- try:
- # Try to load existing ID3 tags, create them if they don't exist
- try:
- audio = EasyID3(file_path)
- except mutagen.id3.ID3NoHeaderError:
- audio = MP3(file_path)
- audio.add_tags()
- audio.save()
- audio = EasyID3(file_path)
-
- # Add basic text metadata using valid EasyID3 keys
- if 'title' in metadata:
- audio['title'] = metadata['title']
- if 'artist' in metadata:
- audio['artist'] = metadata['artist']
- if 'album' in metadata:
- audio['album'] = metadata['album']
- if 'date' in metadata:
- audio['date'] = metadata['date']
-
- # Save the text metadata
- audio.save()
-
- # Handle artwork separately (requires full ID3)
- if 'artwork_url' in metadata and metadata['artwork_url']:
- try:
- # Download artwork
- artwork_response = requests.get(metadata['artwork_url'])
- artwork_response.raise_for_status()
-
- # Add artwork to the file
- audio = ID3(file_path)
- audio.add(APIC(
- encoding=3, # UTF-8
- mime='image/jpeg', # Assume JPEG
- type=3, # Cover image
- desc='Cover',
- data=artwork_response.content
- ))
- audio.save()
- except Exception as e:
- print(f"Failed to add artwork: {str(e)}")
-
- except Exception as e:
- print(f"Failed to add metadata to {file_path}: {str(e)}")
diff --git a/database_functions/oidc_state_manager.py b/database_functions/oidc_state_manager.py
deleted file mode 100644
index a38d2840..00000000
--- a/database_functions/oidc_state_manager.py
+++ /dev/null
@@ -1,29 +0,0 @@
-from typing import Optional
-from database_functions.valkey_client import valkey_client
-
-class OIDCStateManager:
- def store_state(self, state: str, client_id: str) -> bool:
- """Store OIDC state and client_id with 10 minute expiration"""
- try:
- key = f"oidc_state:{state}"
- success = valkey_client.set(key, client_id)
- if success:
- valkey_client.expire(key, 600) # 10 minutes
- return success
- except Exception as e:
- print(f"Error storing OIDC state: {e}")
- return False
-
- def get_client_id(self, state: str) -> Optional[str]:
- """Get client_id for state and delete it after retrieval"""
- try:
- key = f"oidc_state:{state}"
- client_id = valkey_client.get(key)
- if client_id:
- valkey_client.delete(key)
- return client_id
- except Exception as e:
- print(f"Error getting OIDC state: {e}")
- return None
-
-oidc_state_manager = OIDCStateManager()
diff --git a/database_functions/tasks.py b/database_functions/tasks.py
index 8eec4f58..1d7a388c 100644
--- a/database_functions/tasks.py
+++ b/database_functions/tasks.py
@@ -307,14 +307,16 @@ def get_direct_db_connection():
conninfo = f"host={db_host} port={db_port} user={db_user} password={db_password} dbname={db_name}"
return psycopg.connect(conninfo)
else: # Default to MariaDB/MySQL
- import mysql.connector
- return mysql.connector.connect(
+ try:
+ import mariadb as mysql_connector
+ except ImportError:
+ import mysql.connector
+ return mysql_connector.connect(
host=db_host,
port=db_port,
user=db_user,
password=db_password,
- database=db_name,
- collation="utf8mb4_general_ci"
+ database=db_name
)
def close_direct_db_connection(cnx):
diff --git a/database_functions/validate_database.py b/database_functions/validate_database.py
new file mode 100644
index 00000000..c0ab82f7
--- /dev/null
+++ b/database_functions/validate_database.py
@@ -0,0 +1,778 @@
+#!/usr/bin/env python3
+"""
+Database Validator for PinePods
+
+This script validates that an existing database matches the expected schema
+by using the migration system as the source of truth.
+
+Usage:
+ python validate_database.py --db-type mysql --db-host localhost --db-port 3306 --db-user root --db-password pass --db-name pinepods_database
+ python validate_database.py --db-type postgresql --db-host localhost --db-port 5432 --db-user postgres --db-password pass --db-name pinepods_database
+"""
+
+import argparse
+import sys
+import os
+import tempfile
+import logging
+from typing import Dict, List, Set, Tuple, Any, Optional
+from dataclasses import dataclass
+import importlib.util
+
+# Add the parent directory to path so we can import database_functions
+parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(0, parent_dir)
+sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
+
+try:
+ import mysql.connector
+ MYSQL_AVAILABLE = True
+except ImportError:
+ MYSQL_AVAILABLE = False
+
+try:
+ import psycopg
+ POSTGRESQL_AVAILABLE = True
+except ImportError:
+ POSTGRESQL_AVAILABLE = False
+
+from database_functions.migrations import get_migration_manager
+
+
+@dataclass
+class TableInfo:
+ """Information about a database table"""
+ name: str
+ columns: Dict[str, Dict[str, Any]]
+ indexes: Dict[str, Dict[str, Any]]
+ constraints: Dict[str, Dict[str, Any]]
+
+
+@dataclass
+class ValidationResult:
+ """Result of database validation"""
+ is_valid: bool
+ missing_tables: List[str]
+ extra_tables: List[str]
+ table_differences: Dict[str, Dict[str, Any]]
+ missing_indexes: List[Tuple[str, str]] # (table, index)
+ extra_indexes: List[Tuple[str, str]]
+ missing_constraints: List[Tuple[str, str]] # (table, constraint)
+ extra_constraints: List[Tuple[str, str]]
+ column_differences: Dict[str, Dict[str, Dict[str, Any]]] # table -> column -> differences
+
+
+class DatabaseInspector:
+ """Base class for database inspection"""
+
+ def __init__(self, connection):
+ self.connection = connection
+
+ def get_tables(self) -> Set[str]:
+ """Get all table names"""
+ raise NotImplementedError
+
+ def get_table_info(self, table_name: str) -> TableInfo:
+ """Get detailed information about a table"""
+ raise NotImplementedError
+
+ def get_all_table_info(self) -> Dict[str, TableInfo]:
+ """Get information about all tables"""
+ tables = {}
+ for table_name in self.get_tables():
+ tables[table_name] = self.get_table_info(table_name)
+ return tables
+
+
+class MySQLInspector(DatabaseInspector):
+ """MySQL database inspector"""
+
+ def get_tables(self) -> Set[str]:
+ cursor = self.connection.cursor()
+ cursor.execute("SHOW TABLES")
+ tables = {row[0] for row in cursor.fetchall()}
+ cursor.close()
+ return tables
+
+ def get_table_info(self, table_name: str) -> TableInfo:
+ cursor = self.connection.cursor(dictionary=True)
+
+ # Get column information
+ cursor.execute(f"DESCRIBE `{table_name}`")
+ columns = {}
+ for row in cursor.fetchall():
+ columns[row['Field']] = {
+ 'type': row['Type'],
+ 'null': row['Null'],
+ 'key': row['Key'],
+ 'default': row['Default'],
+ 'extra': row['Extra']
+ }
+
+ # Get index information
+ cursor.execute(f"SHOW INDEX FROM `{table_name}`")
+ indexes = {}
+ for row in cursor.fetchall():
+ index_name = row['Key_name']
+ if index_name not in indexes:
+ indexes[index_name] = {
+ 'columns': [],
+ 'unique': not row['Non_unique'],
+ 'type': row['Index_type']
+ }
+ indexes[index_name]['columns'].append(row['Column_name'])
+
+ # Get constraint information (foreign keys, etc.)
+ cursor.execute(f"""
+ SELECT kcu.CONSTRAINT_NAME, tc.CONSTRAINT_TYPE, kcu.COLUMN_NAME,
+ kcu.REFERENCED_TABLE_NAME, kcu.REFERENCED_COLUMN_NAME
+ FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE kcu
+ JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS tc
+ ON kcu.CONSTRAINT_NAME = tc.CONSTRAINT_NAME
+ AND kcu.TABLE_SCHEMA = tc.TABLE_SCHEMA
+ WHERE kcu.TABLE_SCHEMA = DATABASE() AND kcu.TABLE_NAME = %s
+ AND kcu.REFERENCED_TABLE_NAME IS NOT NULL
+ """, (table_name,))
+
+ constraints = {}
+ for row in cursor.fetchall():
+ constraint_name = row['CONSTRAINT_NAME']
+ constraints[constraint_name] = {
+ 'type': 'FOREIGN KEY',
+ 'column': row['COLUMN_NAME'],
+ 'referenced_table': row['REFERENCED_TABLE_NAME'],
+ 'referenced_column': row['REFERENCED_COLUMN_NAME']
+ }
+
+ cursor.close()
+ return TableInfo(table_name, columns, indexes, constraints)
+
+
+class PostgreSQLInspector(DatabaseInspector):
+ """PostgreSQL database inspector"""
+
+ def get_tables(self) -> Set[str]:
+ cursor = self.connection.cursor()
+ cursor.execute("""
+ SELECT table_name
+ FROM information_schema.tables
+ WHERE table_schema = 'public' AND table_type = 'BASE TABLE'
+ """)
+ tables = {row[0] for row in cursor.fetchall()}
+ cursor.close()
+ return tables
+
+ def get_table_info(self, table_name: str) -> TableInfo:
+ cursor = self.connection.cursor()
+
+ # Get column information
+ cursor.execute("""
+ SELECT column_name, data_type, is_nullable, column_default,
+ character_maximum_length, numeric_precision, numeric_scale
+ FROM information_schema.columns
+ WHERE table_schema = 'public' AND table_name = %s
+ ORDER BY ordinal_position
+ """, (table_name,))
+
+ columns = {}
+ for row in cursor.fetchall():
+ col_name, data_type, is_nullable, default, max_length, precision, scale = row
+ type_str = data_type
+ if max_length:
+ type_str += f"({max_length})"
+ elif precision:
+ if scale:
+ type_str += f"({precision},{scale})"
+ else:
+ type_str += f"({precision})"
+
+ columns[col_name] = {
+ 'type': type_str,
+ 'null': is_nullable,
+ 'default': default,
+ 'max_length': max_length,
+ 'precision': precision,
+ 'scale': scale
+ }
+
+ # Get index information
+ cursor.execute("""
+ SELECT i.relname as index_name,
+ array_agg(a.attname ORDER BY c.ordinality) as columns,
+ ix.indisunique as is_unique,
+ ix.indisprimary as is_primary
+ FROM pg_class t
+ JOIN pg_index ix ON t.oid = ix.indrelid
+ JOIN pg_class i ON i.oid = ix.indexrelid
+ JOIN unnest(ix.indkey) WITH ORDINALITY c(colnum, ordinality) ON true
+ JOIN pg_attribute a ON a.attrelid = t.oid AND a.attnum = c.colnum
+ WHERE t.relname = %s AND t.relkind = 'r'
+ GROUP BY i.relname, ix.indisunique, ix.indisprimary
+ """, (table_name,))
+
+ indexes = {}
+ for row in cursor.fetchall():
+ index_name, columns_list, is_unique, is_primary = row
+ indexes[index_name] = {
+ 'columns': columns_list,
+ 'unique': is_unique,
+ 'primary': is_primary
+ }
+
+ # Get constraint information
+ cursor.execute("""
+ SELECT con.conname as constraint_name,
+ con.contype as constraint_type,
+ array_agg(att.attname) as columns,
+ cl.relname as referenced_table,
+ array_agg(att2.attname) as referenced_columns
+ FROM pg_constraint con
+ JOIN pg_class t ON con.conrelid = t.oid
+ JOIN pg_attribute att ON att.attrelid = t.oid AND att.attnum = ANY(con.conkey)
+ LEFT JOIN pg_class cl ON con.confrelid = cl.oid
+ LEFT JOIN pg_attribute att2 ON att2.attrelid = cl.oid AND att2.attnum = ANY(con.confkey)
+ WHERE t.relname = %s
+ GROUP BY con.conname, con.contype, cl.relname
+ """, (table_name,))
+
+ constraints = {}
+ for row in cursor.fetchall():
+ constraint_name, constraint_type, columns_list, ref_table, ref_columns = row
+ constraints[constraint_name] = {
+ 'type': constraint_type,
+ 'columns': columns_list,
+ 'referenced_table': ref_table,
+ 'referenced_columns': ref_columns
+ }
+
+ cursor.close()
+ return TableInfo(table_name, columns, indexes, constraints)
+
+
+class DatabaseValidator:
+ """Main database validator class"""
+
+ def __init__(self, db_type: str, db_config: Dict[str, Any]):
+ self.db_type = db_type.lower()
+ # Normalize mariadb to mysql since they use the same connector
+ if self.db_type == 'mariadb':
+ self.db_type = 'mysql'
+ self.db_config = db_config
+ self.logger = logging.getLogger(__name__)
+
+ def create_test_database(self) -> Tuple[Any, str]:
+ """Create a temporary database and run all migrations"""
+ if self.db_type == 'mysql':
+ return self._create_mysql_test_db()
+ elif self.db_type == 'postgresql':
+ return self._create_postgresql_test_db()
+ else:
+ raise ValueError(f"Unsupported database type: {self.db_type}")
+
+ def _create_mysql_test_db(self) -> Tuple[Any, str]:
+ """Create MySQL test database"""
+ if not MYSQL_AVAILABLE:
+ raise ImportError("mysql-connector-python is required for MySQL validation")
+
+ # Create temporary database name
+ import uuid
+ test_db_name = f"pinepods_test_{uuid.uuid4().hex[:8]}"
+
+ # Connect to MySQL server
+ config = self.db_config.copy()
+ config.pop('database', None) # Remove database from config
+ config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues
+
+ conn = mysql.connector.connect(**config)
+ cursor = conn.cursor()
+
+ try:
+ # Create test database
+ cursor.execute(f"CREATE DATABASE `{test_db_name}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci")
+ cursor.execute(f"USE `{test_db_name}`")
+ cursor.close()
+
+ # Run all migrations
+ self._run_migrations(conn, 'mysql')
+
+ # Create a fresh connection to the test database for schema inspection
+ config['database'] = test_db_name
+ test_conn = mysql.connector.connect(**config)
+
+ # Close the migration connection
+ conn.close()
+
+ return test_conn, test_db_name
+
+ except Exception as e:
+ if cursor:
+ cursor.close()
+ if conn:
+ conn.close()
+ raise e
+
+ def _create_postgresql_test_db(self) -> Tuple[Any, str]:
+ """Create PostgreSQL test database"""
+ if not POSTGRESQL_AVAILABLE:
+ raise ImportError("psycopg is required for PostgreSQL validation")
+
+ # Create temporary database name
+ import uuid
+ test_db_name = f"pinepods_test_{uuid.uuid4().hex[:8]}"
+
+ # Connect to PostgreSQL server
+ config = self.db_config.copy()
+        config.pop('dbname', None)  # Drop the target dbname; replaced with 'postgres' on the next line
+ config['dbname'] = 'postgres' # Connect to default database
+
+ conn = psycopg.connect(**config)
+ conn.autocommit = True
+ cursor = conn.cursor()
+
+ try:
+ # Create test database
+ cursor.execute(f'CREATE DATABASE "{test_db_name}"')
+ cursor.close()
+ conn.close()
+
+ # Connect to the new test database
+ config['dbname'] = test_db_name
+ test_conn = psycopg.connect(**config)
+ test_conn.autocommit = True
+
+ # Run all migrations
+ self._run_migrations(test_conn, 'postgresql')
+
+ return test_conn, test_db_name
+
+ except Exception as e:
+ cursor.close()
+ conn.close()
+ raise e
+
+ def _run_migrations(self, conn: Any, db_type: str):
+ """Run all migrations on the test database using existing migration system"""
+ # Set environment variables for the migration manager
+ import os
+ original_env = {}
+
+ try:
+ # Backup original environment
+ for key in ['DB_TYPE', 'DB_HOST', 'DB_PORT', 'DB_USER', 'DB_PASSWORD', 'DB_NAME']:
+ original_env[key] = os.environ.get(key)
+
+ # Set environment for test database
+ if db_type == 'mysql':
+ os.environ['DB_TYPE'] = 'mysql'
+                os.environ['DB_HOST'] = 'localhost'  # Placeholder only; manager._connection is injected below
+ os.environ['DB_PORT'] = '3306'
+ os.environ['DB_USER'] = 'test'
+ os.environ['DB_PASSWORD'] = 'test'
+ os.environ['DB_NAME'] = 'test'
+ else:
+ os.environ['DB_TYPE'] = 'postgresql'
+ os.environ['DB_HOST'] = 'localhost'
+ os.environ['DB_PORT'] = '5432'
+ os.environ['DB_USER'] = 'test'
+ os.environ['DB_PASSWORD'] = 'test'
+ os.environ['DB_NAME'] = 'test'
+
+ # Import and register migrations
+ import database_functions.migration_definitions
+
+ # Get migration manager and override its connection
+ manager = get_migration_manager()
+ manager._connection = conn
+
+ # Run all migrations
+ success = manager.run_migrations()
+ if not success:
+ raise RuntimeError("Failed to apply migrations")
+
+ finally:
+ # Restore original environment
+ for key, value in original_env.items():
+ if value is not None:
+ os.environ[key] = value
+ elif key in os.environ:
+ del os.environ[key]
+
+ def validate_database(self) -> ValidationResult:
+ """Validate the actual database against the expected schema"""
+ # Create test database with perfect schema
+ test_conn, test_db_name = self.create_test_database()
+
+ try:
+ # Connect to actual database
+ actual_conn = self._connect_to_actual_database()
+
+ try:
+ # Get schema information from both databases
+ if self.db_type == 'mysql':
+ expected_inspector = MySQLInspector(test_conn)
+ actual_inspector = MySQLInspector(actual_conn)
+ # Extract schemas
+ expected_schema = expected_inspector.get_all_table_info()
+ actual_schema = actual_inspector.get_all_table_info()
+ else:
+ # For PostgreSQL, create fresh connection for expected schema since migration manager closes it
+ fresh_test_conn = psycopg.connect(
+ host=self.db_config['host'],
+ port=self.db_config['port'],
+ user=self.db_config['user'],
+ password=self.db_config['password'],
+ dbname=test_db_name
+ )
+ fresh_test_conn.autocommit = True
+
+ try:
+ expected_inspector = PostgreSQLInspector(fresh_test_conn)
+ actual_inspector = PostgreSQLInspector(actual_conn)
+
+ # Extract schemas
+ expected_schema = expected_inspector.get_all_table_info()
+ actual_schema = actual_inspector.get_all_table_info()
+ finally:
+ fresh_test_conn.close()
+
+ # DEBUG: Print what we're actually comparing
+ print(f"\n🔍 DEBUG: Expected schema has {len(expected_schema)} tables:")
+ for table in sorted(expected_schema.keys()):
+ cols = list(expected_schema[table].columns.keys())
+ print(f" {table}: {len(cols)} columns - {', '.join(cols[:5])}{'...' if len(cols) > 5 else ''}")
+
+ print(f"\n🔍 DEBUG: Actual schema has {len(actual_schema)} tables:")
+ for table in sorted(actual_schema.keys()):
+ cols = list(actual_schema[table].columns.keys())
+ print(f" {table}: {len(cols)} columns - {', '.join(cols[:5])}{'...' if len(cols) > 5 else ''}")
+
+ # Check specifically for Playlists table
+ if 'Playlists' in expected_schema and 'Playlists' in actual_schema:
+ exp_cols = set(expected_schema['Playlists'].columns.keys())
+ act_cols = set(actual_schema['Playlists'].columns.keys())
+ print(f"\n🔍 DEBUG: Playlists comparison:")
+ print(f" Expected columns: {sorted(exp_cols)}")
+ print(f" Actual columns: {sorted(act_cols)}")
+ print(f" Missing from actual: {sorted(exp_cols - act_cols)}")
+ print(f" Extra in actual: {sorted(act_cols - exp_cols)}")
+
+ # Compare schemas
+ result = self._compare_schemas(expected_schema, actual_schema)
+
+ return result
+
+ finally:
+ actual_conn.close()
+
+ finally:
+ # Clean up test database - this will close test_conn
+ self._cleanup_test_database(test_conn, test_db_name)
+
+ def _connect_to_actual_database(self) -> Any:
+ """Connect to the actual database"""
+ if self.db_type == 'mysql':
+ config = self.db_config.copy()
+ # Ensure autocommit is enabled for MySQL
+ config['autocommit'] = True
+ config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues
+ return mysql.connector.connect(**config)
+ else:
+ return psycopg.connect(**self.db_config)
+
+ def _cleanup_test_database(self, test_conn: Any, test_db_name: str):
+ """Clean up the test database"""
+ try:
+ # Close the test connection first
+ if test_conn:
+ test_conn.close()
+
+ if self.db_type == 'mysql':
+ config = self.db_config.copy()
+ config.pop('database', None)
+ config['use_pure'] = True # Use pure Python implementation to avoid auth plugin issues
+ cleanup_conn = mysql.connector.connect(**config)
+ cursor = cleanup_conn.cursor()
+ cursor.execute(f"DROP DATABASE IF EXISTS `{test_db_name}`")
+ cursor.close()
+ cleanup_conn.close()
+ else:
+ config = self.db_config.copy()
+ config.pop('dbname', None)
+ config['dbname'] = 'postgres'
+ cleanup_conn = psycopg.connect(**config)
+ cleanup_conn.autocommit = True
+ cursor = cleanup_conn.cursor()
+ cursor.execute(f'DROP DATABASE IF EXISTS "{test_db_name}"')
+ cursor.close()
+ cleanup_conn.close()
+ except Exception as e:
+ self.logger.warning(f"Failed to clean up test database {test_db_name}: {e}")
+
+ def _compare_schemas(self, expected: Dict[str, TableInfo], actual: Dict[str, TableInfo]) -> ValidationResult:
+ """Compare expected and actual database schemas"""
+ expected_tables = set(expected.keys())
+ actual_tables = set(actual.keys())
+
+ missing_tables = list(expected_tables - actual_tables)
+ extra_tables = list(actual_tables - expected_tables)
+
+ table_differences = {}
+ missing_indexes = []
+ extra_indexes = []
+ missing_constraints = []
+ extra_constraints = []
+ column_differences = {}
+
+ # Compare common tables
+ common_tables = expected_tables & actual_tables
+ for table_name in common_tables:
+ expected_table = expected[table_name]
+ actual_table = actual[table_name]
+
+ # Compare columns
+ table_col_diffs = self._compare_columns(expected_table.columns, actual_table.columns)
+ if table_col_diffs:
+ column_differences[table_name] = table_col_diffs
+
+ # Compare indexes
+ expected_indexes = set(expected_table.indexes.keys())
+ actual_indexes = set(actual_table.indexes.keys())
+
+ for missing_idx in expected_indexes - actual_indexes:
+ missing_indexes.append((table_name, missing_idx))
+ for extra_idx in actual_indexes - expected_indexes:
+ extra_indexes.append((table_name, extra_idx))
+
+ # Compare constraints
+ expected_constraints = set(expected_table.constraints.keys())
+ actual_constraints = set(actual_table.constraints.keys())
+
+ for missing_const in expected_constraints - actual_constraints:
+ missing_constraints.append((table_name, missing_const))
+ for extra_const in actual_constraints - expected_constraints:
+ extra_constraints.append((table_name, extra_const))
+
+ # Only fail on critical issues:
+ # - Missing tables (CRITICAL)
+ # - Missing columns (CRITICAL)
+ # Extra tables, extra columns, and type differences are warnings only
+ critical_issues = []
+ critical_issues.extend(missing_tables)
+
+ # Check for missing columns (critical) - but only in expected tables
+ for table, col_diffs in column_differences.items():
+ # Skip extra tables entirely - they shouldn't be validated
+ if table in extra_tables:
+ continue
+
+ for col, diff in col_diffs.items():
+ if diff['status'] == 'missing':
+ critical_issues.append(f"missing column {col} in table {table}")
+
+ is_valid = len(critical_issues) == 0
+
+ return ValidationResult(
+ is_valid=is_valid,
+ missing_tables=missing_tables,
+ extra_tables=extra_tables,
+ table_differences=table_differences,
+ missing_indexes=missing_indexes,
+ extra_indexes=extra_indexes,
+ missing_constraints=missing_constraints,
+ extra_constraints=extra_constraints,
+ column_differences=column_differences
+ )
+
+ def _compare_columns(self, expected: Dict[str, Dict[str, Any]], actual: Dict[str, Dict[str, Any]]) -> Dict[str, Dict[str, Any]]:
+ """Compare column definitions between expected and actual"""
+ differences = {}
+
+ expected_cols = set(expected.keys())
+ actual_cols = set(actual.keys())
+
+ # Missing columns
+ for missing_col in expected_cols - actual_cols:
+ differences[missing_col] = {'status': 'missing', 'expected': expected[missing_col]}
+
+ # Extra columns
+ for extra_col in actual_cols - expected_cols:
+ differences[extra_col] = {'status': 'extra', 'actual': actual[extra_col]}
+
+ # Different columns
+ for col_name in expected_cols & actual_cols:
+ expected_col = expected[col_name]
+ actual_col = actual[col_name]
+
+ col_diffs = {}
+ for key in expected_col:
+ if key in actual_col and expected_col[key] != actual_col[key]:
+ col_diffs[key] = {'expected': expected_col[key], 'actual': actual_col[key]}
+
+ if col_diffs:
+ differences[col_name] = {'status': 'different', 'differences': col_diffs}
+
+ return differences
+
+
+def print_validation_report(result: ValidationResult):
+ """Print a detailed validation report"""
+ print("=" * 80)
+ print("DATABASE VALIDATION REPORT")
+ print("=" * 80)
+
+ # Count critical vs warning issues
+ critical_issues = []
+ warning_issues = []
+
+ # Missing tables are critical
+ critical_issues.extend(result.missing_tables)
+
+ # Missing columns are critical, others are warnings
+ for table, col_diffs in result.column_differences.items():
+ for col, diff in col_diffs.items():
+ if diff['status'] == 'missing':
+ critical_issues.append(f"Missing column {col} in table {table}")
+ else:
+ warning_issues.append((table, col, diff))
+
+ # Extra tables are warnings
+ warning_issues.extend([('EXTRA_TABLE', table, None) for table in result.extra_tables])
+
+ if result.is_valid:
+ if warning_issues:
+ print("✅ DATABASE IS VALID - No critical issues found!")
+ print("⚠️ Some warnings exist but don't affect functionality")
+ else:
+ print("✅ DATABASE IS PERFECT - All checks passed!")
+ else:
+ print("❌ DATABASE VALIDATION FAILED - Critical issues found")
+
+ print()
+
+ # Show critical issues
+ if critical_issues:
+ print("🔴 CRITICAL ISSUES (MUST BE FIXED):")
+ if result.missing_tables:
+ print(" Missing Tables:")
+ for table in result.missing_tables:
+ print(f" - {table}")
+
+ # Show missing columns
+ for table, col_diffs in result.column_differences.items():
+ missing_cols = [col for col, diff in col_diffs.items() if diff['status'] == 'missing']
+ if missing_cols:
+ print(f" Missing Columns in {table}:")
+ for col in missing_cols:
+ print(f" - {col}")
+ print()
+
+ # Show warnings
+ if warning_issues:
+ print("⚠️ WARNINGS (ACCEPTABLE DIFFERENCES):")
+
+ if result.extra_tables:
+ print(" Extra Tables (ignored):")
+ for table in result.extra_tables:
+ print(f" - {table}")
+
+ # Show column warnings
+ for table, col_diffs in result.column_differences.items():
+ table_warnings = []
+ for col, diff in col_diffs.items():
+ if diff['status'] == 'extra':
+ table_warnings.append(f"Extra column: {col}")
+ elif diff['status'] == 'different':
+ details = []
+ for key, values in diff['differences'].items():
+ details.append(f"{key}: {values}")
+ table_warnings.append(f"Different column {col}: {', '.join(details)}")
+
+ if table_warnings:
+ print(f" Table {table}:")
+ for warning in table_warnings:
+ print(f" - {warning}")
+ print()
+
+ if result.missing_indexes:
+ print("🟡 MISSING INDEXES:")
+ for table, index in result.missing_indexes:
+ print(f" - {table}.{index}")
+ print()
+
+ if result.extra_indexes:
+ print("🟡 EXTRA INDEXES:")
+ for table, index in result.extra_indexes:
+ print(f" - {table}.{index}")
+ print()
+
+ if result.missing_constraints:
+ print("🟡 MISSING CONSTRAINTS:")
+ for table, constraint in result.missing_constraints:
+ print(f" - {table}.{constraint}")
+ print()
+
+ if result.extra_constraints:
+ print("🟡 EXTRA CONSTRAINTS:")
+ for table, constraint in result.extra_constraints:
+ print(f" - {table}.{constraint}")
+ print()
+
+
+def main():
+ """Main function"""
+ parser = argparse.ArgumentParser(description='Validate PinePods database schema')
+ parser.add_argument('--db-type', required=True, choices=['mysql', 'mariadb', 'postgresql'], help='Database type')
+ parser.add_argument('--db-host', required=True, help='Database host')
+ parser.add_argument('--db-port', required=True, type=int, help='Database port')
+ parser.add_argument('--db-user', required=True, help='Database user')
+ parser.add_argument('--db-password', required=True, help='Database password')
+ parser.add_argument('--db-name', required=True, help='Database name')
+ parser.add_argument('--verbose', '-v', action='store_true', help='Enable verbose logging')
+
+ args = parser.parse_args()
+
+ # Set up logging
+ level = logging.DEBUG if args.verbose else logging.INFO
+ logging.basicConfig(level=level, format='%(asctime)s - %(levelname)s - %(message)s')
+
+ # Build database config
+ if args.db_type in ['mysql', 'mariadb']:
+ db_config = {
+ 'host': args.db_host,
+ 'port': args.db_port,
+ 'user': args.db_user,
+ 'password': args.db_password,
+ 'database': args.db_name,
+ 'charset': 'utf8mb4',
+ 'collation': 'utf8mb4_unicode_ci'
+ }
+ else: # postgresql
+ db_config = {
+ 'host': args.db_host,
+ 'port': args.db_port,
+ 'user': args.db_user,
+ 'password': args.db_password,
+ 'dbname': args.db_name
+ }
+
+ try:
+ # Create validator and run validation
+ validator = DatabaseValidator(args.db_type, db_config)
+ result = validator.validate_database()
+
+ # Print report
+ print_validation_report(result)
+
+ # Exit with appropriate code
+ sys.exit(0 if result.is_valid else 1)
+
+ except Exception as e:
+ logging.error(f"Validation failed with error: {e}")
+ if args.verbose:
+ import traceback
+ traceback.print_exc()
+ sys.exit(2)
+
+
+if __name__ == '__main__':
+ main()
\ No newline at end of file
diff --git a/database_functions/valkey_client.py b/database_functions/valkey_client.py
deleted file mode 100644
index 09970a61..00000000
--- a/database_functions/valkey_client.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import os
-import redis
-from redis.exceptions import RedisError
-
-class ValkeyClient:
- def __init__(self):
- self.host = os.environ.get("VALKEY_HOST", "localhost")
- self.port = int(os.environ.get("VALKEY_PORT", 6379))
- self.client = None
-
- def connect(self):
- try:
- self.client = redis.Redis(
- host=self.host,
- port=self.port,
- decode_responses=True,
- health_check_interval=10,
- socket_connect_timeout=5,
- retry_on_timeout=True,
- socket_keepalive=True
- )
- self.client.ping() # Test the connection
- print("Successfully connected to Valkey")
- except RedisError as e:
- print(f"Failed to connect to Valkey: {e}")
- self.client = None
-
- def get(self, key):
- if not self.client:
- self.connect()
- try:
- return self.client.get(key)
- except RedisError as e:
- print(f"Error getting key from Valkey: {e}")
- return None
-
- def set(self, key, value):
- if not self.client:
- self.connect()
- try:
- return self.client.set(key, value)
- except RedisError as e:
- print(f"Error setting key in Valkey: {e}")
- return False
-
- def delete(self, key):
- if not self.client:
- self.connect()
- try:
- return self.client.delete(key)
- except RedisError as e:
- print(f"Error deleting key from Valkey: {e}")
- return False
-
- def expire(self, key: str, seconds: int) -> bool:
- if not self.client:
- self.connect()
- try:
- return bool(self.client.expire(key, seconds)) if self.client else False
- except RedisError as e:
- print(f"Error setting expiry in Valkey: {e}")
- return False
-
-valkey_client = ValkeyClient()
diff --git a/database_functions/websocket_broadcaster.py b/database_functions/websocket_broadcaster.py
deleted file mode 100644
index 0e6860f1..00000000
--- a/database_functions/websocket_broadcaster.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# websocket_broadcaster.py - Simple HTTP-based WebSocket broadcaster
-import requests
-import logging
-import json
-
-class WebSocketBroadcaster:
- def __init__(self):
- # Hard-coded to use the internal container port
- self.server_url = "http://localhost:8032"
-
- def broadcast_task_update(self, user_id, task_data, api_key):
- """Send task update via HTTP to the broadcast endpoint"""
- try:
- # Prepare the message
- message = {
- "event": "update",
- "task": task_data
- }
-
- # Send to the broadcast endpoint
- response = requests.post(
- f"{self.server_url}/api/tasks/broadcast",
- json={"user_id": user_id, "message": message},
- headers={"Api-Key": api_key},
- timeout=2 # Short timeout to avoid blocking
- )
-
- # Check result
- if response.status_code == 200:
- print(f"Successfully sent update for task {task_data.get('task_id')}")
- return True
- else:
- print(f"Failed to send update: {response.status_code} - {response.text}")
- return False
- except Exception as e:
- print(f"Error sending broadcast: {str(e)}")
- return False
-
-# Create a singleton instance
-broadcaster = WebSocketBroadcaster()
diff --git a/database_functions/youtube.py b/database_functions/youtube.py
deleted file mode 100644
index b9d31590..00000000
--- a/database_functions/youtube.py
+++ /dev/null
@@ -1,275 +0,0 @@
-from typing import Dict, Optional
-from yt_dlp import YoutubeDL
-from fastapi import HTTPException
-import logging
-import os
-import datetime
-from datetime import timedelta
-import logging
-from bs4 import BeautifulSoup
-import time
-import random
-from database_functions import functions
-
-
-async def get_channel_info(channel_id: str) -> Dict:
- """
- Get YouTube channel info using yt-dlp
- """
- ydl_opts = {
- 'quiet': True,
- 'extract_flat': True,
- 'no_warnings': True,
- 'playlist_items': '0', # Just get channel info, not videos
- 'socket_timeout': 30, # 30 second socket timeout
- 'timeout': 60, # 60 second overall timeout
- }
- print('in get channel info')
-
- try:
- with YoutubeDL(ydl_opts) as ydl:
- channel_url = f"https://www.youtube.com/channel/{channel_id}"
- channel_info = ydl.extract_info(
- channel_url,
- download=False,
- process=False
- )
- print(f'get info {channel_info}')
-
- # Get avatar URL
- thumbnail_url = None
- if channel_info and channel_info.get('thumbnails'):
- avatar_thumbnails = [t for t in channel_info['thumbnails']
- if t.get('id', '').startswith('avatar')]
-
- if avatar_thumbnails:
- thumbnail_url = avatar_thumbnails[-1]['url']
- else:
- avatar_thumbnails = [t for t in channel_info['thumbnails']
- if 'avatar' in t.get('url', '').lower()]
- if avatar_thumbnails:
- thumbnail_url = avatar_thumbnails[-1]['url']
- else:
- thumbnail_url = channel_info['thumbnails'][0]['url']
- print('did a bunch of thumbnail stuff')
- return {
- 'channel_id': channel_id,
- 'name': channel_info.get('channel', '') or channel_info.get('title', ''),
- 'description': channel_info.get('description', '')[:500] if channel_info.get('description') else '',
- 'thumbnail_url': thumbnail_url,
- }
-
- except Exception as e:
- logging.error(f"Error getting channel info: {e}")
- raise HTTPException(
- status_code=500,
- detail=f"Error fetching channel info: {str(e)}"
- )
-
-def download_youtube_audio(video_id: str, output_path: str):
- """Download audio for a YouTube video"""
- # Remove .mp3 extension if present to prevent double extension
- if output_path.endswith('.mp3'):
- base_path = output_path[:-4]
- else:
- base_path = output_path
-
- ydl_opts = {
- 'format': 'bestaudio/best',
- 'postprocessors': [{
- 'key': 'FFmpegExtractAudio',
- 'preferredcodec': 'mp3',
- }],
- 'outtmpl': base_path,
- 'ignoreerrors': True, # Add this to not fail on individual errors
- 'socket_timeout': 30, # 30 second socket timeout
- 'timeout': 60, # 60 second overall timeout
- }
- with YoutubeDL(ydl_opts) as ydl:
- ydl.download([f"https://www.youtube.com/watch?v={video_id}"])
-
-
-def process_youtube_videos(database_type, podcast_id: int, channel_id: str, cnx, feed_cutoff: int):
- """Background task to process videos and download audio"""
-
- logging.basicConfig(level=logging.INFO)
- logger = logging.getLogger(__name__)
-
- logger.info("="*50)
- logger.info(f"Starting YouTube channel processing")
- logger.info(f"Podcast ID: {podcast_id}")
- logger.info(f"Channel ID: {channel_id}")
- logger.info("="*50)
-
- try:
- cutoff_date = datetime.datetime.now(datetime.timezone.utc) - timedelta(days=feed_cutoff)
- logger.info(f"Cutoff date set to: {cutoff_date}")
-
- logger.info("Cleaning up videos older than cutoff date...")
- functions.remove_old_youtube_videos(cnx, database_type, podcast_id, cutoff_date)
-
- ydl_opts = {
- 'quiet': True,
- 'no_warnings': True,
- 'extract_flat': True, # Fast initial fetch
- 'ignoreerrors': True,
- 'socket_timeout': 30, # 30 second socket timeout
- 'timeout': 60, # 60 second overall timeout
- }
-
- logger.info("Initializing YouTube-DL with options:")
- logger.info(str(ydl_opts))
-
- recent_videos = []
- with YoutubeDL(ydl_opts) as ydl:
- channel_url = f"https://www.youtube.com/channel/{channel_id}/videos"
- logger.info(f"Fetching channel data from: {channel_url}")
-
- try:
- results = ydl.extract_info(channel_url, download=False)
- logger.info("Initial channel data fetch successful")
- logger.info(f"Raw result keys: {results.keys() if results else 'No results'}")
- except Exception as e:
- logger.error(f"Failed to fetch channel data: {str(e)}")
- raise
-
- if not results or 'entries' not in results:
- logger.error(f"No video list found in results")
- logger.error(f"Available keys: {results.keys() if results else 'None'}")
- return
-
- logger.info(f"Found {len(results.get('entries', []))} total videos")
-
- # Process each video
- for entry in results.get('entries', []):
- if not entry or not entry.get('id'):
- logger.warning(f"Skipping invalid entry: {entry}")
- continue
-
- try:
- video_id = entry['id']
- logger.info(f"\nProcessing video ID: {video_id}")
-
- # Get upload date using BS4 method
- published = functions.get_video_date(video_id)
- if not published:
- logger.warning(f"Could not determine upload date for video {video_id}, skipping")
- continue
-
- logger.info(f"Video publish date: {published}")
-
- if published <= cutoff_date:
- logger.info(f"Video {video_id} from {published} is too old, stopping processing")
- break
-
- video_data = {
- 'id': video_id,
- 'title': entry['title'],
- 'description': entry.get('description', ''),
- 'url': f"https://www.youtube.com/watch?v={video_id}",
- 'thumbnail': entry.get('thumbnails', [{}])[0].get('url', '') if entry.get('thumbnails') else '',
- 'publish_date': published,
- 'duration': entry.get('duration', 0)
- }
-
- logger.info("Collected video data:")
- logger.info(str(video_data))
-
- recent_videos.append(video_data)
- logger.info(f"Successfully added video {video_id} to processing queue")
-
- except Exception as e:
- logger.error(f"Error processing video entry:")
- logger.error(f"Entry data: {entry}")
- logger.error(f"Error: {str(e)}")
- logger.exception("Full traceback:")
- continue
-
- logger.info(f"\nProcessing complete - Found {len(recent_videos)} recent videos")
-
- if recent_videos:
- logger.info("\nStarting database updates")
- try:
- # Get existing videos first
- existing_videos = functions.get_existing_youtube_videos(cnx, database_type, podcast_id)
-
- # Filter out videos that already exist
- new_videos = []
- for video in recent_videos:
- video_url = f"https://www.youtube.com/watch?v={video['id']}"
- if video_url not in existing_videos:
- new_videos.append(video)
- else:
- logger.info(f"Video already exists, skipping: {video['title']}")
-
- if new_videos:
- functions.add_youtube_videos(cnx, database_type, podcast_id, new_videos)
- logger.info(f"Successfully added {len(new_videos)} new videos")
- else:
- logger.info("No new videos to add")
- except Exception as e:
- logger.error("Failed to add videos to database")
- logger.error(str(e))
- logger.exception("Full traceback:")
- raise
-
- logger.info("\nStarting audio downloads")
- successful_downloads = 0
- failed_downloads = 0
-
- for video in recent_videos:
- try:
- output_path = f"/opt/pinepods/downloads/youtube/{video['id']}.mp3"
- output_path_double = f"{output_path}.mp3"
-
- logger.info(f"\nProcessing download for video: {video['id']}")
- logger.info(f"Title: {video['title']}")
- logger.info(f"Target path: {output_path}")
-
- if os.path.exists(output_path) or os.path.exists(output_path_double):
- logger.info(f"Audio file already exists, skipping download")
- continue
-
- logger.info("Starting download...")
- download_youtube_audio(video['id'], output_path)
- logger.info("Download completed successfully")
- successful_downloads += 1
-
- except Exception as e:
- # This is the key fix - properly catch all exceptions and continue
- failed_downloads += 1
-
- # Check for specific error types to provide better logging
- error_msg = str(e)
- if "members-only content" in error_msg.lower():
- logger.warning(f"Skipping video {video['id']} - Members-only content: {video['title']}")
- elif "private" in error_msg.lower():
- logger.warning(f"Skipping video {video['id']} - Private video: {video['title']}")
- elif "unavailable" in error_msg.lower():
- logger.warning(f"Skipping video {video['id']} - Unavailable video: {video['title']}")
- else:
- logger.error(f"Failed to download video {video['id']}: {video['title']}")
- logger.error(f"Error: {error_msg}")
-
- # Always continue to the next video
- continue
-
- logger.info(f"\nDownload summary: {successful_downloads} successful, {failed_downloads} failed")
- else:
- logger.info("No new videos to process")
-
- except Exception as e:
- logger.error("\nFatal error in process_youtube_channel")
- logger.error(str(e))
- logger.exception("Full traceback:")
- raise e
- finally:
- # Use recalculate to ensure accuracy
- try:
- functions.update_episode_count(cnx, database_type, podcast_id)
- except Exception as e:
- logger.error(f"Failed to update episode count: {str(e)}")
- logger.info("\nCleaning up database connection")
- logger.info("="*50)
- logger.info("Channel processing complete")
- logger.info("="*50)
diff --git a/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml b/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml
new file mode 100644
index 00000000..c536266c
--- /dev/null
+++ b/deployment/docker/compose-files/docker-compose-mariadb/docker-compose.yml
@@ -0,0 +1,55 @@
+services:
+ db:
+ container_name: db
+ image: mariadb:12
+ command: --wait_timeout=1800
+ environment:
+ MYSQL_TCP_PORT: 3306
+ MYSQL_ROOT_PASSWORD: myS3curepass
+ MYSQL_DATABASE: pinepods_database
+ MYSQL_COLLATION_SERVER: utf8mb4_unicode_ci
+ MYSQL_CHARACTER_SET_SERVER: utf8mb4
+ MYSQL_INIT_CONNECT: "SET @@GLOBAL.max_allowed_packet=64*1024*1024;"
+ volumes:
+ - /home/user/pinepods/sql:/var/lib/mysql
+ restart: always
+
+ valkey:
+ image: valkey/valkey:8-alpine
+
+ pinepods:
+ image: madeofpendletonwool/pinepods:latest
+ ports:
+ - "8040:8040"
+ environment:
+ # Basic Server Info
+ SEARCH_API_URL: "https://search.pinepods.online/api/search"
+ PEOPLE_API_URL: "https://people.pinepods.online"
+ HOSTNAME: "http://localhost:8040"
+ # Database Vars
+ DB_TYPE: mariadb
+ DB_HOST: db
+ DB_PORT: 3306
+ DB_USER: root
+ DB_PASSWORD: myS3curepass
+ DB_NAME: pinepods_database
+ # Valkey Settings
+ VALKEY_HOST: valkey
+ VALKEY_PORT: 6379
+ # Enable or Disable Debug Mode for additional Printing
+ DEBUG_MODE: "false"
+ PUID: ${UID:-911}
+ PGID: ${GID:-911}
+ # Add timezone configuration
+ TZ: "America/New_York"
+
+ volumes:
+ # Mount the download and backup locations on the server
+ - /home/user/pinepods/downloads:/opt/pinepods/downloads
+ - /home/user/pinepods/backups:/opt/pinepods/backups
+ # Timezone volumes, HIGHLY optional. Read the timezone notes below
+ - /etc/localtime:/etc/localtime:ro
+ - /etc/timezone:/etc/timezone:ro
+ depends_on:
+ - db
+ - valkey
diff --git a/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml b/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml
index 80af5318..3b9d6f19 100644
--- a/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml
+++ b/deployment/docker/compose-files/docker-compose-mysql/docker-compose.yml
@@ -1,6 +1,7 @@
services:
db:
- image: mariadb:latest
+ container_name: db
+ image: mysql:9
command: --wait_timeout=1800
environment:
MYSQL_TCP_PORT: 3306
@@ -11,27 +12,20 @@ services:
MYSQL_INIT_CONNECT: "SET @@GLOBAL.max_allowed_packet=64*1024*1024;"
volumes:
- /home/user/pinepods/sql:/var/lib/mysql
- ports:
- - "3306:3306"
restart: always
+
valkey:
image: valkey/valkey:8-alpine
- ports:
- - "6379:6379"
+
pinepods:
image: madeofpendletonwool/pinepods:latest
ports:
- # Pinepods Main Port
- "8040:8040"
environment:
# Basic Server Info
SEARCH_API_URL: "https://search.pinepods.online/api/search"
PEOPLE_API_URL: "https://people.pinepods.online"
- # Default Admin User Information
- USERNAME: myadminuser01
- PASSWORD: myS3curepass
- FULLNAME: Pinepods Admin
- EMAIL: user@pinepods.online
+ HOSTNAME: "http://localhost:8040"
# Database Vars
DB_TYPE: mariadb
DB_HOST: db
@@ -43,17 +37,24 @@ services:
VALKEY_HOST: valkey
VALKEY_PORT: 6379
# Enable or Disable Debug Mode for additional Printing
- DEBUG_MODE: False
+ DEBUG_MODE: "false"
PUID: ${UID:-911}
PGID: ${GID:-911}
# Add timezone configuration
TZ: "America/New_York"
+ # Language Configuration
+ DEFAULT_LANGUAGE: "en"
volumes:
# Mount the download and the backup location on the server if you want to. You could mount a nas to the downloads folder or something like that.
# The backups directory is used if backups are made on the web version on pinepods. When taking backups on the client version it downloads them locally.
+ # Mount the download and backup locations on the server
+ # You could mount a NAS to the downloads folder or similar
- /home/user/pinepods/downloads:/opt/pinepods/downloads
- /home/user/pinepods/backups:/opt/pinepods/backups
+ # Timezone volumes, HIGHLY optional. Read the timezone notes below
+ - /etc/localtime:/etc/localtime:ro
+ - /etc/timezone:/etc/timezone:ro
depends_on:
- db
- valkey
diff --git a/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml b/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml
index bf80ff7b..e9d348a6 100644
--- a/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml
+++ b/deployment/docker/compose-files/docker-compose-postgres/docker-compose.yml
@@ -1,20 +1,20 @@
services:
db:
- image: postgres:latest
+ container_name: db
+ image: postgres:17
environment:
- POSTGRES_DB: pypods_database
+ POSTGRES_DB: pinepods_database
POSTGRES_USER: postgres
POSTGRES_PASSWORD: myS3curepass
PGDATA: /var/lib/postgresql/data/pgdata
volumes:
- /home/user/pinepods/pgdata:/var/lib/postgresql/data
- ports:
- - "5432:5432"
restart: always
+
valkey:
image: valkey/valkey:8-alpine
- ports:
- - "6379:6379"
+ restart: always
+
pinepods:
image: madeofpendletonwool/pinepods:latest
ports:
@@ -23,11 +23,7 @@ services:
# Basic Server Info
SEARCH_API_URL: "https://search.pinepods.online/api/search"
PEOPLE_API_URL: "https://people.pinepods.online"
- # Default Admin User Information
- USERNAME: myadminuser01
- PASSWORD: myS3curepass
- FULLNAME: Pinepods Admin
- EMAIL: user@pinepods.online
+ HOSTNAME: "http://localhost:8040"
# Database Vars
DB_TYPE: postgresql
DB_HOST: db
@@ -36,18 +32,24 @@ services:
DB_PASSWORD: myS3curepass
DB_NAME: pinepods_database
# Valkey Settings
- VALKEY_HOST: post-valkey
+ VALKEY_HOST: valkey
VALKEY_PORT: 6379
# Enable or Disable Debug Mode for additional Printing
- DEBUG_MODE: False
+ DEBUG_MODE: "false"
PUID: ${UID:-911}
PGID: ${GID:-911}
# Add timezone configuration
TZ: "America/New_York"
+ # Language Configuration
+ DEFAULT_LANGUAGE: "en"
volumes:
- # Mount the download location on the server if you want to. You could mount a NAS to this folder or something similar
+ # Mount the download and backup locations on the server
- /home/user/pinepods/downloads:/opt/pinepods/downloads
- /home/user/pinepods/backups:/opt/pinepods/backups
+ # Timezone volumes, HIGHLY optional. Read the timezone notes below
+ - /etc/localtime:/etc/localtime:ro
+ - /etc/timezone:/etc/timezone:ro
+ restart: always
depends_on:
- db
- valkey
diff --git a/deployment/kubernetes/helm/pinepods/Chart.yaml b/deployment/kubernetes/helm/pinepods/Chart.yaml
index bab54baa..832fd855 100644
--- a/deployment/kubernetes/helm/pinepods/Chart.yaml
+++ b/deployment/kubernetes/helm/pinepods/Chart.yaml
@@ -10,4 +10,4 @@ dependencies:
- name: valkey
version: 2.0.1
repository: https://charts.bitnami.com/bitnami
- condition: valkey.enabled
\ No newline at end of file
+ condition: valkey.enabled
diff --git a/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml b/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml
index e57581a7..8d5cfb3d 100644
--- a/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml
+++ b/deployment/kubernetes/helm/pinepods/templates/backend-secret.yaml
@@ -9,4 +9,5 @@ type: Opaque
stringData:
API_KEY: {{ .Values.backend.secrets.apiKey | quote }}
API_SECRET: {{ .Values.backend.secrets.apiSecret | quote }}
+ YOUTUBE_API_KEY: {{ .Values.backend.secrets.youtubeApiKey | quote }}
{{- end }}
\ No newline at end of file
diff --git a/deployment/kubernetes/helm/pinepods/templates/deployment.yaml b/deployment/kubernetes/helm/pinepods/templates/deployment.yaml
index 5ef79057..96b0007b 100644
--- a/deployment/kubernetes/helm/pinepods/templates/deployment.yaml
+++ b/deployment/kubernetes/helm/pinepods/templates/deployment.yaml
@@ -29,13 +29,16 @@ spec:
- secretRef:
name: {{ include "pinepods.fullname" $ }}-env
env:
- {{ if (and (not .Values.postgresql.enabled) (.Values.externalDatabase.existingSecret.enabled)) -}}
+ {{ if (and (not .Values.postgresql.enabled) (.Values.externalDatabase.existingSecret.enabled) (not .Values.externalDatabase.manuallyProvide)) -}}
- name: DB_PASSWORD
valueFrom:
secretKeyRef:
name: {{ .Values.externalDatabase.existingSecret.name }}
key: {{ .Values.externalDatabase.existingSecret.key }}
{{- end }}
+ {{- with .Values.extraEnv}}
+ {{- toYaml . | nindent 12 }}
+ {{- end }}
volumeMounts:
{{- if .Values.persistence.enabled }}
- name: downloads
diff --git a/deployment/kubernetes/helm/pinepods/templates/secret.yaml b/deployment/kubernetes/helm/pinepods/templates/secret.yaml
index 441c480b..0592e4d4 100644
--- a/deployment/kubernetes/helm/pinepods/templates/secret.yaml
+++ b/deployment/kubernetes/helm/pinepods/templates/secret.yaml
@@ -8,7 +8,7 @@
{{ $_ := set $env "DB_NAME" "pinepods_database" }}
{{ $_ := set $env "DB_USER" "postgres" }}
{{ $_ := set $env "DB_PASSWORD" .Values.postgresql.auth.password }}
-{{ else }}
+{{ else if not .Values.externalDatabase.manuallyProvide }}
{{ $_ := set $env "DB_TYPE" .Values.externalDatabase.type }}
{{ $_ := set $env "DB_HOST" .Values.externalDatabase.host }}
{{ $_ := set $env "DB_PORT" .Values.externalDatabase.port }}
diff --git a/deployment/kubernetes/helm/pinepods/values.yaml b/deployment/kubernetes/helm/pinepods/values.yaml
index 81076a17..5a8ddbf3 100644
--- a/deployment/kubernetes/helm/pinepods/values.yaml
+++ b/deployment/kubernetes/helm/pinepods/values.yaml
@@ -80,6 +80,8 @@ postgresql:
# -- Enable PostgreSQL deployment
# Set to false if using external database
enabled: true
+ image:
+ repository: bitnamilegacy/postgresql
auth:
# -- PostgreSQL username
username: postgres
@@ -130,6 +132,8 @@ externalDatabase:
enabled: false
name: existing-secret
key: password
+ # -- manually provide the DB environment variables under extraEnv. Useful for DB solutions like CNPG
+ manuallyProvide: false
resources: {}
@@ -137,6 +141,8 @@ resources: {}
valkey:
# -- Enable Valkey deployment
enabled: true
+ image:
+ repository: bitnamilegacy/valkey
architecture: standalone # This prevents replica creation
auth:
enabled: false
@@ -178,6 +184,16 @@ env:
# Set to true for additional logging
DEBUG_MODE: "false"
+## Allows you to specify additional environment values directly on the application pod. Useful for specific configmap or secret references.
+extraEnv: []
+# - name: CUSTOM_VARIABLE
+# value: "true"
+# - name: DB_HOST
+# valueFrom:
+# secretKeyRef:
+# name: pinepods-db-app
+# key: host
+
## Pod Security Context
securityContext: {}
# fsGroup: 2000
@@ -219,6 +235,7 @@ backend:
secrets:
apiKey: "MYPODCASTINDEXKEY"
apiSecret: "MYPODCASTINDEXSECRET"
+ youtubeApiKey: "YOUR_YOUTUBE_API_KEY_HERE"
ingress:
enabled: true
className: ""
@@ -263,4 +280,4 @@ podpeople:
paths:
- path: /
pathType: Prefix
- tls: []
\ No newline at end of file
+ tls: []
diff --git a/dockerfile b/dockerfile
index d2eadb2e..e5a2e39e 100644
--- a/dockerfile
+++ b/dockerfile
@@ -38,14 +38,14 @@ COPY ./gpodder-api/config ./config
COPY ./gpodder-api/internal ./internal
# Build the application
-RUN CGO_ENABLED=0 GOOS=linux go build -o gpodder-api ./cmd/server/
+RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o gpodder-api ./cmd/server/
# Python builder stage for database setup
FROM python:3.11-alpine AS python-builder
WORKDIR /build
-# Install build dependencies for PyInstaller
-RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev
+# Install build dependencies for PyInstaller and MariaDB connector
+RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev mariadb-connector-c-dev
# Copy Python source files
COPY ./database_functions ./database_functions
@@ -90,24 +90,35 @@ ENV OPENSSL_LIB_DIR=/usr/lib
ENV OPENSSL_INCLUDE_DIR=/usr/include
# Build the Rust API
-RUN cargo build --release
+RUN cargo build --release && strip target/release/pinepods-api
# Final stage for setting up runtime environment
FROM alpine
# Metadata
LABEL maintainer="Collin Pendleton "
-# Install runtime dependencies (removed python3, py3-pip, cronie, and openrc)
-RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg supervisor wget jq
+# Install runtime dependencies
+RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg wget jq mariadb-connector-c-dev
-# Download and install latest yt-dlp binary
+
+# Download and install latest yt-dlp binary (musllinux for Alpine)
RUN LATEST_VERSION=$(curl -s https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest | jq -r .tag_name) && \
- wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_linux" && \
+ wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_musllinux" && \
chmod +x /usr/local/bin/yt-dlp
+
+# Download and install Horust (x86_64)
+RUN wget -O /tmp/horust.tar.gz "https://github.com/FedericoPonzi/Horust/releases/download/v0.1.11/horust-x86_64-unknown-linux-musl.tar.gz" && \
+ cd /tmp && tar -xzf horust.tar.gz && \
+ mv horust /usr/local/bin/ && \
+ chmod +x /usr/local/bin/horust && \
+ rm -f /tmp/horust.tar.gz
+
ENV TZ=UTC
# Copy compiled database setup binary (replaces Python dependency)
COPY --from=python-builder /build/dist/pinepods-db-setup /usr/local/bin/
# Copy built files from the builder stage to the Nginx serving directory
COPY --from=builder /app/dist /var/www/html/
+# Copy translation files for the Rust API to access
+COPY ./web/src/translations /var/www/html/static/translations
# Copy Go API binary from the go-builder stage
COPY --from=go-builder /gpodder-api/gpodder-api /usr/local/bin/
# Copy Rust API binary from the rust-api-builder stage
@@ -119,7 +130,7 @@ COPY startup/startup.sh /startup.sh
RUN chmod +x /startup.sh
# Copy Pinepods runtime files
RUN mkdir -p /pinepods
-RUN mkdir -p /var/log/supervisor/
+RUN mkdir -p /var/log/pinepods/ && mkdir -p /etc/horust/services/
COPY startup/ /pinepods/startup/
# Legacy cron scripts removed - background tasks now handled by internal Rust scheduler
COPY clients/ /pinepods/clients/
diff --git a/dockerfile-arm b/dockerfile-arm
index cf42d836..fdf61547 100644
--- a/dockerfile-arm
+++ b/dockerfile-arm
@@ -3,9 +3,6 @@ FROM rust:alpine AS builder
# Install build dependencies
RUN apk update && apk upgrade && \
apk add --no-cache musl-dev libffi-dev zlib-dev jpeg-dev
-# Install wasm target and build tools
-RUN rustup target add wasm32-unknown-unknown && \
- cargo install wasm-bindgen-cli
RUN apk update && apk upgrade
# Add the Edge Community repository
RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk/repositories
@@ -13,25 +10,69 @@ RUN echo "@edge http://dl-cdn.alpinelinux.org/alpine/edge/community" >> /etc/apk
RUN apk update
# Install the desired package from the edge community repository
RUN apk add trunk@edge
+# Install wasm target and build tools
+RUN rustup target add wasm32-unknown-unknown && \
+ cargo install wasm-bindgen-cli && \
+ cargo install horust --locked
+
+# Test wasm-bindgen installation before full build
+RUN echo "Testing wasm-bindgen installation..." && \
+ which wasm-bindgen && \
+ wasm-bindgen --version && \
+ ls -la /usr/local/cargo/bin/ && \
+ echo "wasm-bindgen test completed"
+
+# Test trunk installation
+RUN echo "Testing trunk installation..." && \
+ which trunk && \
+ trunk --version && \
+ echo "trunk test completed"
+
# Add application files to the builder stage
COPY ./web/Cargo.lock ./web/Cargo.toml ./web/dev-info.md ./web/index.html ./web/tailwind.config.js ./web/Trunk.toml /app/
COPY ./web/src /app/src
COPY ./web/static /app/static
WORKDIR /app
-# Initialize trunk first
-RUN cd /tmp && \
- echo "" > dummy.rs && \
- trunk build dummy.rs || true
-RUN cargo install --locked wasm-bindgen-cli
-# Build the Yew application in release mode
-RUN RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build --release
+
+# Test that trunk can find wasm-bindgen before full build
+RUN echo "Testing if trunk can find wasm-bindgen..." && \
+ RUST_LOG=debug trunk build --help && \
+ echo "trunk can find wasm-bindgen"
+# Auto-detect wasm-bindgen version and replace trunk's glibc binary with our musl one
+RUN WASM_BINDGEN_VERSION=$(grep -A1 "name = \"wasm-bindgen\"" /app/Cargo.lock | grep "version = " | cut -d'"' -f2) && \
+ echo "Detected wasm-bindgen version: $WASM_BINDGEN_VERSION" && \
+ RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" timeout 30 trunk build --features server_build --release || \
+ (echo "Build failed as expected, replacing downloaded binary..." && \
+ mkdir -p /root/.cache/trunk/wasm-bindgen-$WASM_BINDGEN_VERSION && \
+ cp /usr/local/cargo/bin/wasm-bindgen /root/.cache/trunk/wasm-bindgen-$WASM_BINDGEN_VERSION/ && \
+ echo "Retrying build with musl binary..." && \
+ RUSTFLAGS="--cfg=web_sys_unstable_apis --cfg getrandom_backend=\"wasm_js\"" trunk build --features server_build --release)
+
+# Go builder stage for the gpodder API
+FROM golang:alpine AS go-builder
+WORKDIR /gpodder-api
+
+# Install build dependencies
+RUN apk add --no-cache git
+
+# Copy go module files first for better layer caching
+COPY ./gpodder-api/go.mod ./gpodder-api/go.sum ./
+RUN go mod download
+
+# Copy the rest of the source code
+COPY ./gpodder-api/cmd ./cmd
+COPY ./gpodder-api/config ./config
+COPY ./gpodder-api/internal ./internal
+
+# Build the application
+RUN CGO_ENABLED=0 GOOS=linux go build -o gpodder-api ./cmd/server/
# Python builder stage for database setup
FROM python:3.11-alpine AS python-builder
WORKDIR /build
-# Install build dependencies for PyInstaller
-RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev
+# Install build dependencies for PyInstaller and MariaDB connector
+RUN apk add --no-cache gcc musl-dev libffi-dev openssl-dev mariadb-connector-c-dev
# Copy Python source files
COPY ./database_functions ./database_functions
@@ -59,64 +100,83 @@ RUN pyinstaller --onefile \
--console \
startup/setup_database_new.py
+# Rust API builder stage
+FROM rust:alpine AS rust-api-builder
+WORKDIR /rust-api
+
+# Install build dependencies
+RUN apk add --no-cache musl-dev pkgconfig openssl-dev openssl-libs-static
+
+# Copy Rust API files
+COPY ./rust-api/Cargo.toml ./rust-api/Cargo.lock ./
+COPY ./rust-api/src ./src
+
+# Set environment for static linking
+ENV OPENSSL_STATIC=1
+ENV OPENSSL_LIB_DIR=/usr/lib
+ENV OPENSSL_INCLUDE_DIR=/usr/include
+
+# Build the Rust API
+RUN cargo build --release
+
# Final stage for setting up runtime environment
FROM alpine
# Metadata
LABEL maintainer="Collin Pendleton "
-# Install runtime dependencies (removed Python)
-RUN apk update && apk upgrade && \
- apk add --no-cache nginx tzdata openssl bash mariadb-client curl ffmpeg supervisor wget jq && \
- rm -rf /var/cache/apk/*
+# Install runtime dependencies
+RUN apk add --no-cache tzdata nginx openssl bash mariadb-client postgresql-client curl ffmpeg wget jq mariadb-connector-c-dev
-# Download and install latest yt-dlp binary for ARM64
+
+# Download and install latest yt-dlp binary for ARM64 (musllinux for Alpine)
RUN LATEST_VERSION=$(curl -s https://api.github.com/repos/yt-dlp/yt-dlp/releases/latest | jq -r .tag_name) && \
- wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_linux_aarch64" && \
+ wget -O /usr/local/bin/yt-dlp "https://github.com/yt-dlp/yt-dlp/releases/download/${LATEST_VERSION}/yt-dlp_musllinux_aarch64" && \
chmod +x /usr/local/bin/yt-dlp
-ENV TZ=UTC
-# Set environment variables
-ENV APP_ROOT=/pinepods
+# Copy Horust binary from builder stage
+COPY --from=builder /usr/local/cargo/bin/horust /usr/local/bin/
+ENV TZ=UTC
# Copy compiled database setup binary (replaces Python dependency)
COPY --from=python-builder /build/dist/pinepods-db-setup /usr/local/bin/
-
-# Copy wait-for-it script
-COPY wait-for-it/wait-for-it.sh /wait-for-it.sh
-RUN chmod +x /wait-for-it.sh
-
-# Copy built files from the builder stage
+# Copy built files from the builder stage to the Nginx serving directory
COPY --from=builder /app/dist /var/www/html/
-
-# Set up application directories and files
+# Copy translation files for the Rust API to access
+COPY ./web/src/translations /var/www/html/static/translations
+# Copy Go API binary from the go-builder stage
+COPY --from=go-builder /gpodder-api/gpodder-api /usr/local/bin/
+# Copy Rust API binary from the rust-api-builder stage
+COPY --from=rust-api-builder /rust-api/target/release/pinepods-api /usr/local/bin/
+# Move to the root directory to execute the startup script
WORKDIR /
+# Copy startup scripts
COPY startup/startup.sh /startup.sh
RUN chmod +x /startup.sh
-
-RUN mkdir -p /pinepods /var/log/supervisor/
+# Copy Pinepods runtime files
+RUN mkdir -p /pinepods
+RUN mkdir -p /var/log/pinepods/ && mkdir -p /etc/horust/services/
COPY startup/ /pinepods/startup/
+# Legacy cron scripts removed - background tasks now handled by internal Rust scheduler
COPY clients/ /pinepods/clients/
COPY database_functions/ /pinepods/database_functions/
-# Legacy cron scripts removed - background tasks now handled by internal Rust scheduler
RUN chmod +x /pinepods/startup/startup.sh
-
-# Clean things up
-RUN rm -rf \
- /var/cache/apk/* \
- /root/.cache \
- /tmp/* \
- /var/tmp/* \
- /usr/share/man \
- /usr/share/doc
-
-# Define and set the version
+ENV APP_ROOT=/pinepods
+# Define the build argument
ARG PINEPODS_VERSION
+# Write the Pinepods version to the current_version file
RUN echo "${PINEPODS_VERSION}" > /pinepods/current_version
+# Configure Nginx
+COPY startup/nginx.conf /etc/nginx/nginx.conf
+
+# Copy script to start gpodder API
+COPY ./gpodder-api/start-gpodder.sh /usr/local/bin/
+RUN chmod +x /usr/local/bin/start-gpodder.sh
RUN cp /usr/share/zoneinfo/UTC /etc/localtime && \
echo "UTC" > /etc/timezone
-# Configure Nginx
-COPY startup/nginx.conf /etc/nginx/nginx.conf
+# Expose ports
+EXPOSE 8080 8000
+# Start everything using the startup script
ENTRYPOINT ["bash", "/startup.sh"]
diff --git a/docs/index.yaml b/docs/index.yaml
index 60af1f82..6a118d21 100644
--- a/docs/index.yaml
+++ b/docs/index.yaml
@@ -2,7 +2,45 @@ apiVersion: v1
entries:
pinepods:
- apiVersion: v2
- created: "2025-05-14T12:45:57.821873003Z"
+ created: "2025-11-04T13:32:05.132884772Z"
+ dependencies:
+ - condition: postgresql.enabled
+ name: postgresql
+ repository: https://charts.bitnami.com/bitnami
+ version: 15.5.14
+ - condition: valkey.enabled
+ name: valkey
+ repository: https://charts.bitnami.com/bitnami
+ version: 2.0.1
+ description: A Helm chart for deploying Pinepods - A complete podcast management
+ system and allows you to play, download, and keep track of podcasts you enjoy.
+ All self hosted and enjoyed on your own server!
+ digest: dd25bb9bde17df0b5d3fbb8a56aa23fe358734577870f8801d55123ef6a80570
+ name: pinepods
+ urls:
+ - https://helm.pinepods.online/pinepods-0.8.2.tgz
+ version: 0.8.2
+ - apiVersion: v2
+ created: "2025-11-04T13:32:05.123523925Z"
+ dependencies:
+ - condition: postgresql.enabled
+ name: postgresql
+ repository: https://charts.bitnami.com/bitnami
+ version: 15.5.14
+ - condition: valkey.enabled
+ name: valkey
+ repository: https://charts.bitnami.com/bitnami
+ version: 2.0.1
+ description: A Helm chart for deploying Pinepods - A complete podcast management
+ system and allows you to play, download, and keep track of podcasts you enjoy.
+ All self hosted and enjoyed on your own server!
+ digest: 28e32586ecbdfc1749890007055c61add7b78076cee90980d425113b38b13b9c
+ name: pinepods
+ urls:
+ - https://helm.pinepods.online/pinepods-0.8.1.tgz
+ version: 0.8.1
+ - apiVersion: v2
+ created: "2025-11-04T13:32:05.114082893Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -21,7 +59,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.8.tgz
version: 0.7.8
- apiVersion: v2
- created: "2025-05-14T12:45:57.81219332Z"
+ created: "2025-11-04T13:32:05.104601718Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -40,7 +78,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.7.tgz
version: 0.7.7
- apiVersion: v2
- created: "2025-05-14T12:45:57.802612391Z"
+ created: "2025-11-04T13:32:05.095243553Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -59,7 +97,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.6.tgz
version: 0.7.6
- apiVersion: v2
- created: "2025-05-14T12:45:57.793019863Z"
+ created: "2025-11-04T13:32:05.08593408Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -78,7 +116,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.5.tgz
version: 0.7.5
- apiVersion: v2
- created: "2025-05-14T12:45:57.783514329Z"
+ created: "2025-11-04T13:32:05.076444979Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -97,7 +135,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.4.tgz
version: 0.7.4
- apiVersion: v2
- created: "2025-05-14T12:45:57.77388794Z"
+ created: "2025-11-04T13:32:05.067177875Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -116,7 +154,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.3.tgz
version: 0.7.3
- apiVersion: v2
- created: "2025-05-14T12:45:57.764225682Z"
+ created: "2025-11-04T13:32:05.057181838Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -135,7 +173,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.2.tgz
version: 0.7.2
- apiVersion: v2
- created: "2025-05-14T12:45:57.754821944Z"
+ created: "2025-11-04T13:32:05.04708939Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -154,7 +192,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.1.tgz
version: 0.7.1
- apiVersion: v2
- created: "2025-05-14T12:45:57.744826392Z"
+ created: "2025-11-04T13:32:05.037629207Z"
dependencies:
- condition: postgresql.enabled
name: postgresql
@@ -173,7 +211,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.7.0.tgz
version: 0.7.0
- apiVersion: v2
- created: "2025-05-14T12:45:57.734393365Z"
+ created: "2025-11-04T13:32:05.028436822Z"
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
@@ -187,7 +225,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.6.6.tgz
version: 0.6.6
- apiVersion: v2
- created: "2025-05-14T12:45:57.730412407Z"
+ created: "2025-11-04T13:32:05.023937037Z"
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
@@ -201,7 +239,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.6.5.tgz
version: 0.6.5
- apiVersion: v2
- created: "2025-05-14T12:45:57.725514296Z"
+ created: "2025-11-04T13:32:05.020029522Z"
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
@@ -215,7 +253,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.6.4.tgz
version: 0.6.4
- apiVersion: v2
- created: "2025-05-14T12:45:57.72084837Z"
+ created: "2025-11-04T13:32:05.015236734Z"
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
@@ -229,7 +267,7 @@ entries:
- https://helm.pinepods.online/pinepods-0.6.3.tgz
version: 0.6.3
- apiVersion: v2
- created: "2025-05-14T12:45:57.716786601Z"
+ created: "2025-11-04T13:32:05.010471203Z"
dependencies:
- name: postgresql
repository: https://charts.bitnami.com/bitnami
@@ -242,4 +280,4 @@ entries:
urls:
- https://helm.pinepods.online/pinepods-0.6.2.tgz
version: 0.6.2
-generated: "2025-05-14T12:45:57.711649484Z"
+generated: "2025-11-04T13:32:05.006291589Z"
diff --git a/docs/pinepods-0.8.1.tgz b/docs/pinepods-0.8.1.tgz
new file mode 100644
index 00000000..81b0d6ba
Binary files /dev/null and b/docs/pinepods-0.8.1.tgz differ
diff --git a/docs/pinepods-0.8.2.tgz b/docs/pinepods-0.8.2.tgz
new file mode 100644
index 00000000..deb75083
Binary files /dev/null and b/docs/pinepods-0.8.2.tgz differ
diff --git a/fastlane/metadata/android/en-US/full_description.txt b/fastlane/metadata/android/en-US/full_description.txt
index 1084350b..80dc00a6 100644
--- a/fastlane/metadata/android/en-US/full_description.txt
+++ b/fastlane/metadata/android/en-US/full_description.txt
@@ -1,6 +1,7 @@
PinePods is a complete podcast management solution that allows you to host your own podcast server and enjoy a beautiful mobile experience.
Features:
+
• Self-hosted podcast server synchronization
• Beautiful, intuitive mobile interface
• Download episodes for offline listening
@@ -14,4 +15,4 @@ Features:
PinePods gives you complete control over your podcast experience while providing the convenience of modern podcast apps. Perfect for users who want privacy, control, and a great listening experience.
-Note: This app requires a PinePods server to be set up. Visit the PinePods GitHub repository for server installation instructions.
\ No newline at end of file
+Note: This app requires a PinePods server to be set up. Visit the PinePods GitHub repository for server installation instructions.
diff --git a/fastlane/metadata/android/en-US/images/featureGraphic.png b/fastlane/metadata/android/en-US/images/featureGraphic.png
new file mode 100644
index 00000000..e7aab93a
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/featureGraphic.png differ
diff --git a/fastlane/metadata/android/en-US/images/icon.png b/fastlane/metadata/android/en-US/images/icon.png
new file mode 100644
index 00000000..4fe781cf
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/icon.png differ
diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png
new file mode 100644
index 00000000..a5c21ee7
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/1.png differ
diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png
new file mode 100644
index 00000000..668c4072
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/2.png differ
diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png
new file mode 100644
index 00000000..6182ba2b
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/3.png differ
diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png
new file mode 100644
index 00000000..1d45d009
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/4.png differ
diff --git a/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png b/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png
new file mode 100644
index 00000000..4c004426
Binary files /dev/null and b/fastlane/metadata/android/en-US/images/phoneScreenshots/5.png differ
diff --git a/gpodder-api/internal/api/auth.go b/gpodder-api/internal/api/auth.go
index 651ca9bf..93bffa12 100644
--- a/gpodder-api/internal/api/auth.go
+++ b/gpodder-api/internal/api/auth.go
@@ -30,7 +30,6 @@ type argon2Params struct {
// AuthMiddleware creates a middleware for authentication
func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc {
return func(c *gin.Context) {
- log.Printf("[DEBUG] AuthMiddleware processing request: %s %s", c.Request.Method, c.Request.URL.Path)
// Get the username from the URL parameters
username := c.Param("username")
@@ -41,12 +40,9 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc {
return
}
- log.Printf("[DEBUG] AuthMiddleware: Processing request for username: %s", username)
-
// Check if this is an internal API call via X-GPodder-Token
gpodderTokenHeader := c.GetHeader("X-GPodder-Token")
if gpodderTokenHeader != "" {
- log.Printf("[DEBUG] AuthMiddleware: Found X-GPodder-Token header")
// Get user data
var userID int
@@ -76,7 +72,6 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc {
// For internal calls with X-GPodder-Token header, validate token directly
if gpodderToken.Valid && gpodderToken.String == gpodderTokenHeader {
- log.Printf("[DEBUG] AuthMiddleware: X-GPodder-Token validated for user: %s", username)
c.Set("userID", userID)
c.Set("username", username)
c.Next()
@@ -176,13 +171,11 @@ func AuthMiddleware(db *db.PostgresDB) gin.HandlerFunc {
// Check if this is a gpodder token authentication
// Check if this is a gpodder token authentication
if gpodderToken.Valid && (gpodderToken.String == password || gpodderToken.String == gpodderTokenHeader) {
- log.Printf("[DEBUG] AuthMiddleware: User authenticated with gpodder token: %s", username)
authenticated = true
}
// If token auth didn't succeed, try password authentication
if !authenticated && verifyPassword(password, hashedPassword) {
- log.Printf("[DEBUG] AuthMiddleware: User authenticated with password: %s", username)
authenticated = true
}
@@ -589,18 +582,34 @@ func AuthenticationMiddleware(database *db.Database) gin.HandlerFunc {
log.Printf("[DEBUG] AuthenticationMiddleware processing request: %s %s",
c.Request.Method, c.Request.URL.Path)
- if strings.Contains(c.Request.URL.Path, "/episodes/") && strings.HasSuffix(c.Request.URL.Path, ".json") {
- // Extract username from URL path for episode actions
+ // Handle GPodder API standard .json suffix patterns
+ if strings.HasSuffix(c.Request.URL.Path, ".json") {
parts := strings.Split(c.Request.URL.Path, "/")
- if len(parts) >= 3 {
- // The path format is /episodes/username.json
+ var username string
+
+ // Handle /episodes/username.json pattern
+ if strings.Contains(c.Request.URL.Path, "/episodes/") && len(parts) >= 3 {
usernameWithExt := parts[len(parts)-1]
- // Remove .json extension
- username := strings.TrimSuffix(usernameWithExt, ".json")
- // Set it as the username parameter
- c.Params = append(c.Params, gin.Param{Key: "username", Value: username})
+ username = strings.TrimSuffix(usernameWithExt, ".json")
log.Printf("[DEBUG] AuthenticationMiddleware: Extracted username '%s' from episode actions URL", username)
}
+
+ // Handle /devices/username.json pattern
+ if strings.Contains(c.Request.URL.Path, "/devices/") {
+ for i, part := range parts {
+ if part == "devices" && i+1 < len(parts) {
+ usernameWithExt := parts[i+1]
+ username = strings.TrimSuffix(usernameWithExt, ".json")
+ log.Printf("[DEBUG] AuthenticationMiddleware: Extracted username '%s' from devices URL", username)
+ break
+ }
+ }
+ }
+
+ // Set username parameter if extracted
+ if username != "" {
+ c.Params = append(c.Params, gin.Param{Key: "username", Value: username})
+ }
}
// First try session auth
diff --git a/gpodder-api/internal/api/device.go b/gpodder-api/internal/api/device.go
index 3fb75cb2..88502485 100644
--- a/gpodder-api/internal/api/device.go
+++ b/gpodder-api/internal/api/device.go
@@ -154,8 +154,6 @@ func updateDeviceData(database *db.Database) gin.HandlerFunc {
return
}
- log.Printf("[DEBUG] All URL parameters: %v", c.Params)
-
// Get device name from URL with fix for .json suffix
deviceName := c.Param("deviceid")
// Also try alternative parameter name if needed
diff --git a/gpodder-api/internal/api/episode.go b/gpodder-api/internal/api/episode.go
index cf22106f..9f0c45f0 100644
--- a/gpodder-api/internal/api/episode.go
+++ b/gpodder-api/internal/api/episode.go
@@ -18,7 +18,6 @@ import (
// getEpisodeActions handles GET /api/2/episodes/{username}.json
func getEpisodeActions(database *db.Database) gin.HandlerFunc {
return func(c *gin.Context) {
- log.Printf("[DEBUG] getEpisodeActions handling request: %s %s", c.Request.Method, c.Request.URL.Path)
// Get user ID from middleware
userID, exists := c.Get("userID")
@@ -34,9 +33,6 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
deviceName := c.Query("device")
aggregated := c.Query("aggregated") == "true"
- log.Printf("[DEBUG] getEpisodeActions: Parameters - since=%s, podcast=%s, device=%s, aggregated=%v",
- sinceStr, podcastURL, deviceName, aggregated)
-
// Get device ID if provided
var deviceID *int
if deviceName != "" {
@@ -105,7 +101,14 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
latestTimestamp = time.Now().Unix() // Fallback to current time
}
- // Build query based on parameters
+ // Performance optimization: Add limits and optimize query structure
+ const MAX_EPISODE_ACTIONS = 25000 // Limit raised to 25k to handle power users while preventing DoS
+
+ // Log query performance info
+ log.Printf("[DEBUG] getEpisodeActions: Query for user %v with since=%d, device=%s, aggregated=%v",
+ userID, since, deviceName, aggregated)
+
+ // Build query based on parameters with performance optimizations
var queryParts []string
if database.IsPostgreSQLDB() {
@@ -113,7 +116,7 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
"SELECT " +
"e.ActionID, e.UserID, e.DeviceID, e.PodcastURL, e.EpisodeURL, " +
"e.Action, e.Timestamp, e.Started, e.Position, e.Total, " +
- "d.DeviceName " +
+ "COALESCE(d.DeviceName, '') as DeviceName " +
"FROM \"GpodderSyncEpisodeActions\" e " +
"LEFT JOIN \"GpodderDevices\" d ON e.DeviceID = d.DeviceID " +
"WHERE e.UserID = $1",
@@ -123,7 +126,7 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
"SELECT " +
"e.ActionID, e.UserID, e.DeviceID, e.PodcastURL, e.EpisodeURL, " +
"e.Action, e.Timestamp, e.Started, e.Position, e.Total, " +
- "d.DeviceName " +
+ "COALESCE(d.DeviceName, '') as DeviceName " +
"FROM GpodderSyncEpisodeActions e " +
"LEFT JOIN GpodderDevices d ON e.DeviceID = d.DeviceID " +
"WHERE e.UserID = ?",
@@ -182,8 +185,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
e.Timestamp = la.max_timestamp
LEFT JOIN "GpodderDevices" d ON e.DeviceID = d.DeviceID
WHERE e.UserID = $1
- ORDER BY e.Timestamp DESC
- `, conditionsStr)
+ ORDER BY e.Timestamp ASC
+ LIMIT %d
+ `, conditionsStr, MAX_EPISODE_ACTIONS)
} else {
// For MySQL, we need to use ? placeholders and rebuild the argument list
args = []interface{}{userID} // Reset args to just include userID for now
@@ -235,8 +239,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
e.Timestamp = la.max_timestamp
LEFT JOIN GpodderDevices d ON e.DeviceID = d.DeviceID
WHERE e.UserID = ?
- ORDER BY e.Timestamp DESC
- `, conditionsStr)
+ ORDER BY e.Timestamp ASC
+ LIMIT %d
+ `, conditionsStr, MAX_EPISODE_ACTIONS)
}
} else {
// Simple query with ORDER BY
@@ -275,18 +280,34 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
}
}
+ // ORDER BY DESC (newest first) to prioritize recent actions
+ // This ensures recent play state is synced first, even if total actions > limit
queryParts = append(queryParts, "ORDER BY e.Timestamp DESC")
+
+ // Add LIMIT for performance - prevents returning massive datasets
+ // Clients should use the 'since' parameter to paginate through results
+ if database.IsPostgreSQLDB() {
+ queryParts = append(queryParts, fmt.Sprintf("LIMIT %d", MAX_EPISODE_ACTIONS))
+ } else {
+ queryParts = append(queryParts, fmt.Sprintf("LIMIT %d", MAX_EPISODE_ACTIONS))
+ }
+
query = strings.Join(queryParts, " ")
}
- // Execute query
+ // Execute query with timing
+ startTime := time.Now()
rows, err := database.Query(query, args...)
+ queryDuration := time.Since(startTime)
+
if err != nil {
- log.Printf("[ERROR] getEpisodeActions: Error querying episode actions: %v", err)
+ log.Printf("[ERROR] getEpisodeActions: Error querying episode actions (took %v): %v", queryDuration, err)
c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to get episode actions"})
return
}
defer rows.Close()
+
+ log.Printf("[DEBUG] getEpisodeActions: Query executed in %v", queryDuration)
// Build response
actions := make([]models.EpisodeAction, 0)
@@ -343,8 +364,9 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
// Continue with what we've got so far
}
- log.Printf("[DEBUG] getEpisodeActions: Returning %d actions with timestamp %d",
- len(actions), latestTimestamp)
+ // Log performance results
+ totalDuration := time.Since(startTime)
+ log.Printf("[DEBUG] getEpisodeActions: Returning %d actions, total time: %v", len(actions), totalDuration)
// Return response in gpodder format
c.JSON(http.StatusOK, models.EpisodeActionsResponse{
@@ -357,7 +379,6 @@ func getEpisodeActions(database *db.Database) gin.HandlerFunc {
// uploadEpisodeActions handles POST /api/2/episodes/{username}.json
func uploadEpisodeActions(database *db.Database) gin.HandlerFunc {
return func(c *gin.Context) {
- log.Printf("[DEBUG] uploadEpisodeActions handling request: %s %s", c.Request.Method, c.Request.URL.Path)
// Get user ID from middleware
userID, exists := c.Get("userID")
@@ -384,8 +405,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc {
actions = wrappedActions.Actions
}
- log.Printf("[DEBUG] uploadEpisodeActions: Received %d actions to process", len(actions))
-
// Begin transaction
tx, err := database.Begin()
if err != nil {
@@ -447,7 +466,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc {
if err != nil {
if err == sql.ErrNoRows {
// Create the device if it doesn't exist
- log.Printf("[DEBUG] uploadEpisodeActions: Creating new device: %s", action.Device)
if database.IsPostgreSQLDB() {
query = `
@@ -527,8 +545,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc {
if parsedTime, err := time.Parse(format, t); err == nil {
actionTimestamp = parsedTime.Unix()
parsed = true
- log.Printf("[DEBUG] uploadEpisodeActions: Parsed timestamp '%s' with format '%s' to Unix timestamp %d",
- t, format, actionTimestamp)
break
}
}
@@ -673,8 +689,6 @@ func uploadEpisodeActions(database *db.Database) gin.HandlerFunc {
return
}
- log.Printf("[DEBUG] uploadEpisodeActions: Successfully processed %d actions", len(actions))
-
// Return response
c.JSON(http.StatusOK, models.EpisodeActionResponse{
Timestamp: timestamp,
diff --git a/gpodder-api/internal/api/subscriptions.go b/gpodder-api/internal/api/subscriptions.go
index 980c9566..506b9dda 100644
--- a/gpodder-api/internal/api/subscriptions.go
+++ b/gpodder-api/internal/api/subscriptions.go
@@ -22,6 +22,9 @@ import (
// Maximum number of subscriptions per user
const MAX_SUBSCRIPTIONS = 5000
+// Limits for subscription sync to prevent overwhelming responses
+const MAX_SUBSCRIPTION_CHANGES = 5000 // Reasonable limit for subscription changes per sync
+
// sanitizeURL cleans and validates a URL
func sanitizeURL(rawURL string) (string, error) {
// Trim leading/trailing whitespace
@@ -63,14 +66,16 @@ func sanitizeURL(rawURL string) (string, error) {
// getSubscriptions handles GET /api/2/subscriptions/{username}/{deviceid}
func getSubscriptions(database *db.Database) gin.HandlerFunc {
return func(c *gin.Context) {
- log.Printf("[DEBUG] getSubscriptions handling request: %s %s", c.Request.Method, c.Request.URL.Path)
+ log.Printf("[DEBUG] getSubscriptions: Starting request processing - %s %s", c.Request.Method, c.Request.URL.Path)
// Get user ID from middleware
userID, exists := c.Get("userID")
if !exists {
+ log.Printf("[ERROR] getSubscriptions: userID not found in context")
c.JSON(http.StatusUnauthorized, gin.H{"error": "Unauthorized"})
return
}
+ log.Printf("[DEBUG] getSubscriptions: userID found: %v", userID)
// Get device ID from URL - with fix for .json suffix
deviceName := c.Param("deviceid")
@@ -185,8 +190,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
defer rows.Close()
- // Build subscription list
- var podcasts []string
+ // Build subscription list - ensure never nil
+ podcasts := make([]string, 0)
for rows.Next() {
var url string
if err := rows.Scan(&url); err != nil {
@@ -225,6 +230,11 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
// Return subscriptions in gpodder format, ensuring backward compatibility
+ // CRITICAL FIX for issue #636: Ensure arrays are never nil - AntennaPod requires arrays, not null
+ if podcasts == nil {
+ podcasts = []string{}
+ }
+
response := gin.H{
"add": podcasts,
"remove": []string{},
@@ -238,45 +248,37 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
// Process actual changes since the timestamp
- // Query subscriptions added since the given timestamp
+ // Query subscriptions added since the given timestamp - simplified for performance
var addRows *sql.Rows
if database.IsPostgreSQLDB() {
query = `
- SELECT DISTINCT s.PodcastURL
- FROM "GpodderSyncSubscriptions" s
- WHERE s.UserID = $1
- AND s.DeviceID != $2
- AND s.Timestamp > $3
- AND s.Action = 'add'
- AND NOT EXISTS (
- SELECT 1 FROM "GpodderSyncSubscriptions" s2
- WHERE s2.UserID = s.UserID
- AND s2.PodcastURL = s.PodcastURL
- AND s2.DeviceID = $2
- AND s2.Timestamp > s.Timestamp
- AND s2.Action = 'add'
- )
+ SELECT s.PodcastURL
+ FROM "GpodderSyncSubscriptions" s
+ WHERE s.UserID = $1
+ AND s.DeviceID != $2
+ AND s.Timestamp > $3
+ AND s.Action = 'add'
+ GROUP BY s.PodcastURL
+ ORDER BY MAX(s.Timestamp) DESC
+ LIMIT $4
`
- addRows, err = database.Query(query, userID, deviceID, since)
+ log.Printf("[DEBUG] getSubscriptions: Executing add query with limit %d", MAX_SUBSCRIPTION_CHANGES)
+ addRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES)
} else {
query = `
- SELECT DISTINCT s.PodcastURL
- FROM GpodderSyncSubscriptions s
- WHERE s.UserID = ?
- AND s.DeviceID != ?
- AND s.Timestamp > ?
- AND s.Action = 'add'
- AND NOT EXISTS (
- SELECT 1 FROM GpodderSyncSubscriptions s2
- WHERE s2.UserID = s.UserID
- AND s2.PodcastURL = s.PodcastURL
- AND s2.DeviceID = ?
- AND s2.Timestamp > s.Timestamp
- AND s2.Action = 'add'
- )
+ SELECT s.PodcastURL
+ FROM GpodderSyncSubscriptions s
+ WHERE s.UserID = ?
+ AND s.DeviceID != ?
+ AND s.Timestamp > ?
+ AND s.Action = 'add'
+ GROUP BY s.PodcastURL
+ ORDER BY MAX(s.Timestamp) DESC
+ LIMIT ?
`
- addRows, err = database.Query(query, userID, deviceID, since, deviceID)
+ log.Printf("[DEBUG] getSubscriptions: Executing add query with limit %d", MAX_SUBSCRIPTION_CHANGES)
+ addRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES)
}
if err != nil {
@@ -286,7 +288,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
defer addRows.Close()
- addList := []string{}
+ // Ensure addList is never nil
+ addList := make([]string, 0)
for addRows.Next() {
var url string
if err := addRows.Scan(&url); err != nil {
@@ -296,45 +299,35 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
addList = append(addList, url)
}
- // Query subscriptions removed since the given timestamp
+ // Query subscriptions removed since the given timestamp - simplified for performance
var removeRows *sql.Rows
if database.IsPostgreSQLDB() {
query = `
- SELECT DISTINCT s.PodcastURL
- FROM "GpodderSyncSubscriptions" s
- WHERE s.UserID = $1
- AND s.DeviceID != $2
- AND s.Timestamp > $3
- AND s.Action = 'remove'
- AND NOT EXISTS (
- SELECT 1 FROM "GpodderSyncSubscriptions" s2
- WHERE s2.UserID = s.UserID
- AND s2.PodcastURL = s.PodcastURL
- AND s2.DeviceID = $2
- AND s2.Timestamp > s.Timestamp
- AND s2.Action = 'add'
- )
+ SELECT s.PodcastURL
+ FROM "GpodderSyncSubscriptions" s
+ WHERE s.UserID = $1
+ AND s.DeviceID != $2
+ AND s.Timestamp > $3
+ AND s.Action = 'remove'
+ GROUP BY s.PodcastURL
+ ORDER BY MAX(s.Timestamp) DESC
+ LIMIT $4
`
- removeRows, err = database.Query(query, userID, deviceID, since)
+ removeRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES)
} else {
query = `
- SELECT DISTINCT s.PodcastURL
- FROM GpodderSyncSubscriptions s
- WHERE s.UserID = ?
- AND s.DeviceID != ?
- AND s.Timestamp > ?
- AND s.Action = 'remove'
- AND NOT EXISTS (
- SELECT 1 FROM GpodderSyncSubscriptions s2
- WHERE s2.UserID = s.UserID
- AND s2.PodcastURL = s.PodcastURL
- AND s2.DeviceID = ?
- AND s2.Timestamp > s.Timestamp
- AND s2.Action = 'add'
- )
+ SELECT s.PodcastURL
+ FROM GpodderSyncSubscriptions s
+ WHERE s.UserID = ?
+ AND s.DeviceID != ?
+ AND s.Timestamp > ?
+ AND s.Action = 'remove'
+ GROUP BY s.PodcastURL
+ ORDER BY MAX(s.Timestamp) DESC
+ LIMIT ?
`
- removeRows, err = database.Query(query, userID, deviceID, since, deviceID)
+ removeRows, err = database.Query(query, userID, deviceID, since, MAX_SUBSCRIPTION_CHANGES)
}
if err != nil {
@@ -344,7 +337,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
defer removeRows.Close()
- removeList := []string{}
+ // Ensure removeList is never nil
+ removeList := make([]string, 0)
for removeRows.Next() {
var url string
if err := removeRows.Scan(&url); err != nil {
@@ -378,6 +372,14 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
log.Printf("[WARNING] Error updating device last sync time: %v", err)
}
+ // CRITICAL FIX for issue #636: Ensure arrays are never nil - AntennaPod requires arrays, not null
+ if addList == nil {
+ addList = []string{}
+ }
+ if removeList == nil {
+ removeList = []string{}
+ }
+
response := gin.H{
"add": addList,
"remove": removeList,
@@ -482,8 +484,8 @@ func getSubscriptions(database *db.Database) gin.HandlerFunc {
}
defer rows.Close()
- // Build response - ONLY ITERATE ONCE
- var urls []string
+ // Build response - ensure never nil
+ urls := make([]string, 0)
for rows.Next() {
var url string
if err := rows.Scan(&url); err != nil {
@@ -1001,7 +1003,7 @@ func uploadSubscriptionChanges(database *db.Database) gin.HandlerFunc {
// Process subscriptions to add
timestamp := time.Now().Unix()
- updateURLs := make([][]string, 0)
+ updateURLs := make([][]string, 0) // Ensure never nil
for _, url := range changes.Add {
// Clean URL
@@ -1299,8 +1301,8 @@ func getAllSubscriptions(database *db.Database) gin.HandlerFunc {
}
defer rows.Close()
- // Build response
- var urls []string
+ // Build response - ensure never nil
+ urls := make([]string, 0)
for rows.Next() {
var url string
if err := rows.Scan(&url); err != nil {
@@ -1425,8 +1427,8 @@ func getSubscriptionsSimple(database *db.Database) gin.HandlerFunc {
}
defer rows.Close()
- // Build response
- var urls []string
+ // Build response - ensure never nil
+ urls := make([]string, 0)
for rows.Next() {
var url string
if err := rows.Scan(&url); err != nil {
diff --git a/gpodder-api/internal/db/database.go b/gpodder-api/internal/db/database.go
index 4eca2a31..4b4aa544 100644
--- a/gpodder-api/internal/db/database.go
+++ b/gpodder-api/internal/db/database.go
@@ -61,44 +61,19 @@ func NewDatabase(cfg config.DatabaseConfig) (*Database, error) {
fmt.Println("Successfully connected to the database")
- // Run migrations with retry logic for table dependencies
- if err := runMigrationsWithRetry(db, cfg.Type); err != nil {
- db.Close()
- return nil, fmt.Errorf("failed to run migrations: %w", err)
- }
+ // Migrations are now handled by the Python migration system
+ // Skip Go migrations to avoid conflicts
+ log.Println("Skipping Go migrations - now handled by Python migration system")
return &Database{DB: db, Type: cfg.Type}, nil
}
-// runMigrationsWithRetry runs migrations with retry logic for dependency issues
-func runMigrationsWithRetry(db *sql.DB, dbType string) error {
- maxRetries := 10
- retryDelay := 3 * time.Second
-
- for attempt := 1; attempt <= maxRetries; attempt++ {
- err := RunMigrations(db, dbType)
- if err == nil {
- log.Println("Migrations completed successfully")
- return nil
- }
-
- // Check if the error is due to missing prerequisite tables
- if strings.Contains(err.Error(), "required table") && strings.Contains(err.Error(), "does not exist") {
- log.Printf("Attempt %d/%d: Required PinePods tables not ready yet, retrying in %v... Error: %v",
- attempt, maxRetries, retryDelay, err)
-
- if attempt < maxRetries {
- time.Sleep(retryDelay)
- continue
- }
- }
-
- // For other errors, fail immediately
- return err
- }
-
- return fmt.Errorf("failed to run migrations after %d attempts", maxRetries)
-}
+// runMigrationsWithRetry - DISABLED: migrations now handled by Python system
+// func runMigrationsWithRetry(db *sql.DB, dbType string) error {
+// All migration logic has been moved to the Python migration system
+// to ensure consistency and centralized management
+// This function is kept for reference but is no longer used
+// }
// connectPostgreSQL connects to a PostgreSQL database
func connectPostgreSQL(cfg config.DatabaseConfig) (*sql.DB, error) {
diff --git a/gpodder-api/internal/db/postgres.go b/gpodder-api/internal/db/postgres.go
index e7561664..08b5bcb9 100644
--- a/gpodder-api/internal/db/postgres.go
+++ b/gpodder-api/internal/db/postgres.go
@@ -78,11 +78,9 @@ func NewPostgresDB(cfg config.DatabaseConfig) (*PostgresDB, error) {
fmt.Println("Successfully connected to the database")
- // Run migrations to ensure schema is up to date
- if err := RunMigrations(db, "postgresql"); err != nil {
- db.Close()
- return nil, fmt.Errorf("failed to run migrations: %w", err)
- }
+ // Migrations are now handled by the Python migration system
+ // Skip Go migrations to avoid conflicts
+ fmt.Println("Skipping Go migrations - now handled by Python migration system")
return &PostgresDB{DB: db}, nil
}
diff --git a/gpodder-api/internal/models/models.go b/gpodder-api/internal/models/models.go
index a0c5402d..14500a60 100644
--- a/gpodder-api/internal/models/models.go
+++ b/gpodder-api/internal/models/models.go
@@ -1,6 +1,7 @@
package models
import (
+ "encoding/json"
"time"
)
@@ -51,7 +52,7 @@ type SubscriptionResponse struct {
Add []string `json:"add"`
Remove []string `json:"remove"`
Timestamp int64 `json:"timestamp"`
- UpdateURLs [][]string `json:"update_urls,omitempty"`
+ UpdateURLs [][]string `json:"update_urls"` // Removed omitempty to ensure field is always present
}
// EpisodeAction represents an action performed on an episode
@@ -69,12 +70,55 @@ type EpisodeAction struct {
Episode string `json:"episode"`
Device string `json:"device,omitempty"`
Action string `json:"action"`
- Timestamp interface{} `json:"timestamp"` // Accept any type
+ Timestamp interface{} `json:"-"` // Accept any type internally, but customize JSON output
Started *int `json:"started,omitempty"`
Position *int `json:"position,omitempty"`
Total *int `json:"total,omitempty"`
}
+// MarshalJSON customizes JSON serialization to format timestamp as ISO 8601 string
+// AntennaPod expects format: "yyyy-MM-dd'T'HH:mm:ss" (without Z timezone indicator)
+func (e EpisodeAction) MarshalJSON() ([]byte, error) {
+ type Alias EpisodeAction
+
+ // Convert timestamp to Unix seconds
+ var unixTimestamp int64
+ switch t := e.Timestamp.(type) {
+ case int64:
+ unixTimestamp = t
+ case int:
+ unixTimestamp = int64(t)
+ case float64:
+ unixTimestamp = int64(t)
+ default:
+ // Default to current time if timestamp is invalid
+ unixTimestamp = time.Now().Unix()
+ }
+
+ // Format as ISO 8601 without timezone (AntennaPod requirement)
+ timestampStr := time.Unix(unixTimestamp, 0).UTC().Format("2006-01-02T15:04:05")
+
+ return json.Marshal(&struct {
+ Podcast string `json:"podcast"`
+ Episode string `json:"episode"`
+ Device string `json:"device,omitempty"`
+ Action string `json:"action"`
+ Timestamp string `json:"timestamp"`
+ Started *int `json:"started,omitempty"`
+ Position *int `json:"position,omitempty"`
+ Total *int `json:"total,omitempty"`
+ }{
+ Podcast: e.Podcast,
+ Episode: e.Episode,
+ Device: e.Device,
+ Action: e.Action,
+ Timestamp: timestampStr,
+ Started: e.Started,
+ Position: e.Position,
+ Total: e.Total,
+ })
+}
+
// EpisodeActionResponse represents a response to episode action upload
type EpisodeActionResponse struct {
Timestamp int64 `json:"timestamp"`
diff --git a/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg b/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg
new file mode 100755
index 00000000..072b425a
--- /dev/null
+++ b/images/Download_on_the_App_Store_Badge_US-UK_RGB_blk_092917.svg
@@ -0,0 +1,46 @@
+
diff --git a/images/badge_obtainium.png b/images/badge_obtainium.png
new file mode 100644
index 00000000..a4cf4f9c
Binary files /dev/null and b/images/badge_obtainium.png differ
diff --git a/mobile/android/app/build.gradle b/mobile/android/app/build.gradle
index ff6b413d..25daa62e 100644
--- a/mobile/android/app/build.gradle
+++ b/mobile/android/app/build.gradle
@@ -29,7 +29,7 @@ if (keystorePropertiesFile.exists()) {
}
android {
- compileSdk 35
+ compileSdk 36
ndkVersion flutter.ndkVersion
compileOptions {
@@ -43,13 +43,20 @@ android {
defaultConfig {
applicationId "com.gooseberrydevelopment.pinepods"
- minSdkVersion 22
- targetSdkVersion 34
+ minSdkVersion flutter.minSdkVersion
+ targetSdkVersion 36
versionCode flutterVersionCode.toInteger()
versionName flutterVersionName
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
+ dependenciesInfo {
+ // Disables dependency metadata when building APKs (for IzzyOnDroid/F-Droid)
+ includeInApk = false
+ // Disables dependency metadata when building Android App Bundles (for Google Play)
+ includeInBundle = false
+ }
+
signingConfigs {
release {
keyAlias keystoreProperties['keyAlias']
@@ -78,6 +85,11 @@ android {
abortOnError false
disable 'InvalidPackage'
}
+
+ // Disable PNG crunching for reproducible builds
+ aaptOptions {
+ cruncherEnabled = false
+ }
}
flutter {
diff --git a/mobile/android/app/src/main/AndroidManifest.xml b/mobile/android/app/src/main/AndroidManifest.xml
index 1b1b6f74..526a7034 100644
--- a/mobile/android/app/src/main/AndroidManifest.xml
+++ b/mobile/android/app/src/main/AndroidManifest.xml
@@ -9,6 +9,20 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
@@ -45,11 +59,23 @@
-
+
+
+
+
+
+
+
+
+
+
+
+
+
CFBundleVersion
1.0
MinimumOSVersion
- 12.0
+ 13.0
diff --git a/mobile/ios/Flutter/Generated.xcconfig b/mobile/ios/Flutter/Generated.xcconfig
index e5a4dc4e..b4e66a46 100644
--- a/mobile/ios/Flutter/Generated.xcconfig
+++ b/mobile/ios/Flutter/Generated.xcconfig
@@ -2,13 +2,15 @@
FLUTTER_ROOT=/Users/collin_pendleton/development/flutter
FLUTTER_APPLICATION_PATH=/Users/collin_pendleton/Documents/github/PinePods/mobile
COCOAPODS_PARALLEL_CODE_SIGN=true
-FLUTTER_TARGET=lib/main.dart
+FLUTTER_TARGET=/Users/collin_pendleton/Documents/github/PinePods/mobile/lib/main.dart
FLUTTER_BUILD_DIR=build
-FLUTTER_BUILD_NAME=0.7.9001
-FLUTTER_BUILD_NUMBER=0.7.9001
+FLUTTER_BUILD_NAME=0.8.0
+FLUTTER_BUILD_NUMBER=20252161
+FLUTTER_CLI_BUILD_MODE=debug
EXCLUDED_ARCHS[sdk=iphonesimulator*]=i386
EXCLUDED_ARCHS[sdk=iphoneos*]=armv7
+DART_DEFINES=RkxVVFRFUl9WRVJTSU9OPTMuMzUuMg==,RkxVVFRFUl9DSEFOTkVMPXN0YWJsZQ==,RkxVVFRFUl9HSVRfVVJMPWh0dHBzOi8vZ2l0aHViLmNvbS9mbHV0dGVyL2ZsdXR0ZXIuZ2l0,RkxVVFRFUl9GUkFNRVdPUktfUkVWSVNJT049MDVkYjk2ODkwOA==,RkxVVFRFUl9FTkdJTkVfUkVWSVNJT049YThiZmRmYzM5NA==,RkxVVFRFUl9EQVJUX1ZFUlNJT049My45LjA=
DART_OBFUSCATION=false
TRACK_WIDGET_CREATION=true
TREE_SHAKE_ICONS=false
-PACKAGE_CONFIG=.dart_tool/package_config.json
+PACKAGE_CONFIG=/Users/collin_pendleton/Documents/github/PinePods/mobile/.dart_tool/package_config.json
diff --git a/mobile/ios/Flutter/flutter_export_environment.sh b/mobile/ios/Flutter/flutter_export_environment.sh
index 9ee246b3..9e4d5f5f 100755
--- a/mobile/ios/Flutter/flutter_export_environment.sh
+++ b/mobile/ios/Flutter/flutter_export_environment.sh
@@ -3,11 +3,13 @@
export "FLUTTER_ROOT=/Users/collin_pendleton/development/flutter"
export "FLUTTER_APPLICATION_PATH=/Users/collin_pendleton/Documents/github/PinePods/mobile"
export "COCOAPODS_PARALLEL_CODE_SIGN=true"
-export "FLUTTER_TARGET=lib/main.dart"
+export "FLUTTER_TARGET=/Users/collin_pendleton/Documents/github/PinePods/mobile/lib/main.dart"
export "FLUTTER_BUILD_DIR=build"
-export "FLUTTER_BUILD_NAME=0.7.9"
-export "FLUTTER_BUILD_NUMBER=0.7.9"
+export "FLUTTER_BUILD_NAME=0.8.0"
+export "FLUTTER_BUILD_NUMBER=20252161"
+export "FLUTTER_CLI_BUILD_MODE=debug"
+export "DART_DEFINES=RkxVVFRFUl9WRVJTSU9OPTMuMzUuMg==,RkxVVFRFUl9DSEFOTkVMPXN0YWJsZQ==,RkxVVFRFUl9HSVRfVVJMPWh0dHBzOi8vZ2l0aHViLmNvbS9mbHV0dGVyL2ZsdXR0ZXIuZ2l0,RkxVVFRFUl9GUkFNRVdPUktfUkVWSVNJT049MDVkYjk2ODkwOA==,RkxVVFRFUl9FTkdJTkVfUkVWSVNJT049YThiZmRmYzM5NA==,RkxVVFRFUl9EQVJUX1ZFUlNJT049My45LjA="
export "DART_OBFUSCATION=false"
export "TRACK_WIDGET_CREATION=true"
export "TREE_SHAKE_ICONS=false"
-export "PACKAGE_CONFIG=.dart_tool/package_config.json"
+export "PACKAGE_CONFIG=/Users/collin_pendleton/Documents/github/PinePods/mobile/.dart_tool/package_config.json"
diff --git a/mobile/ios/Podfile b/mobile/ios/Podfile
index cc146a7b..37078cf6 100644
--- a/mobile/ios/Podfile
+++ b/mobile/ios/Podfile
@@ -1,5 +1,5 @@
# Uncomment this line to define a global platform for your project
-platform :ios, '12.0'
+platform :ios, '13.0'
# CocoaPods analytics sends network stats synchronously affecting flutter build latency.
ENV['COCOAPODS_DISABLE_STATS'] = 'true'
@@ -42,6 +42,8 @@ post_install do |installer|
flutter_additional_ios_build_settings(target)
target.build_configurations.each do |config|
+ # Ensure minimum deployment target is 13.0
+ config.build_settings['IPHONEOS_DEPLOYMENT_TARGET'] = '13.0'
config.build_settings['GCC_PREPROCESSOR_DEFINITIONS'] ||= [
'$(inherited)',
diff --git a/mobile/ios/Podfile.lock b/mobile/ios/Podfile.lock
new file mode 100644
index 00000000..65b45558
--- /dev/null
+++ b/mobile/ios/Podfile.lock
@@ -0,0 +1,165 @@
+PODS:
+ - app_links (0.0.2):
+ - Flutter
+ - audio_service (0.0.1):
+ - Flutter
+ - FlutterMacOS
+ - audio_session (0.0.1):
+ - Flutter
+ - connectivity_plus (0.0.1):
+ - Flutter
+ - device_info_plus (0.0.1):
+ - Flutter
+ - DKImagePickerController/Core (4.3.9):
+ - DKImagePickerController/ImageDataManager
+ - DKImagePickerController/Resource
+ - DKImagePickerController/ImageDataManager (4.3.9)
+ - DKImagePickerController/PhotoGallery (4.3.9):
+ - DKImagePickerController/Core
+ - DKPhotoGallery
+ - DKImagePickerController/Resource (4.3.9)
+ - DKPhotoGallery (0.0.19):
+ - DKPhotoGallery/Core (= 0.0.19)
+ - DKPhotoGallery/Model (= 0.0.19)
+ - DKPhotoGallery/Preview (= 0.0.19)
+ - DKPhotoGallery/Resource (= 0.0.19)
+ - SDWebImage
+ - SwiftyGif
+ - DKPhotoGallery/Core (0.0.19):
+ - DKPhotoGallery/Model
+ - DKPhotoGallery/Preview
+ - SDWebImage
+ - SwiftyGif
+ - DKPhotoGallery/Model (0.0.19):
+ - SDWebImage
+ - SwiftyGif
+ - DKPhotoGallery/Preview (0.0.19):
+ - DKPhotoGallery/Model
+ - DKPhotoGallery/Resource
+ - SDWebImage
+ - SwiftyGif
+ - DKPhotoGallery/Resource (0.0.19):
+ - SDWebImage
+ - SwiftyGif
+ - file_picker (0.0.1):
+ - DKImagePickerController/PhotoGallery
+ - Flutter
+ - Flutter (1.0.0)
+ - flutter_downloader (0.0.1):
+ - Flutter
+ - just_audio (0.0.1):
+ - Flutter
+ - FlutterMacOS
+ - package_info_plus (0.4.5):
+ - Flutter
+ - path_provider_foundation (0.0.1):
+ - Flutter
+ - FlutterMacOS
+ - permission_handler_apple (9.3.0):
+ - Flutter
+ - SDWebImage (5.21.1):
+ - SDWebImage/Core (= 5.21.1)
+ - SDWebImage/Core (5.21.1)
+ - share_plus (0.0.1):
+ - Flutter
+ - shared_preferences_foundation (0.0.1):
+ - Flutter
+ - FlutterMacOS
+ - sqflite_darwin (0.0.4):
+ - Flutter
+ - FlutterMacOS
+ - SwiftyGif (5.4.5)
+ - url_launcher_ios (0.0.1):
+ - Flutter
+ - webview_flutter_wkwebview (0.0.1):
+ - Flutter
+ - FlutterMacOS
+
+DEPENDENCIES:
+ - app_links (from `.symlinks/plugins/app_links/ios`)
+ - audio_service (from `.symlinks/plugins/audio_service/darwin`)
+ - audio_session (from `.symlinks/plugins/audio_session/ios`)
+ - connectivity_plus (from `.symlinks/plugins/connectivity_plus/ios`)
+ - device_info_plus (from `.symlinks/plugins/device_info_plus/ios`)
+ - file_picker (from `.symlinks/plugins/file_picker/ios`)
+ - Flutter (from `Flutter`)
+ - flutter_downloader (from `.symlinks/plugins/flutter_downloader/ios`)
+ - just_audio (from `.symlinks/plugins/just_audio/darwin`)
+ - package_info_plus (from `.symlinks/plugins/package_info_plus/ios`)
+ - path_provider_foundation (from `.symlinks/plugins/path_provider_foundation/darwin`)
+ - permission_handler_apple (from `.symlinks/plugins/permission_handler_apple/ios`)
+ - share_plus (from `.symlinks/plugins/share_plus/ios`)
+ - shared_preferences_foundation (from `.symlinks/plugins/shared_preferences_foundation/darwin`)
+ - sqflite_darwin (from `.symlinks/plugins/sqflite_darwin/darwin`)
+ - url_launcher_ios (from `.symlinks/plugins/url_launcher_ios/ios`)
+ - webview_flutter_wkwebview (from `.symlinks/plugins/webview_flutter_wkwebview/darwin`)
+
+SPEC REPOS:
+ trunk:
+ - DKImagePickerController
+ - DKPhotoGallery
+ - SDWebImage
+ - SwiftyGif
+
+EXTERNAL SOURCES:
+ app_links:
+ :path: ".symlinks/plugins/app_links/ios"
+ audio_service:
+ :path: ".symlinks/plugins/audio_service/darwin"
+ audio_session:
+ :path: ".symlinks/plugins/audio_session/ios"
+ connectivity_plus:
+ :path: ".symlinks/plugins/connectivity_plus/ios"
+ device_info_plus:
+ :path: ".symlinks/plugins/device_info_plus/ios"
+ file_picker:
+ :path: ".symlinks/plugins/file_picker/ios"
+ Flutter:
+ :path: Flutter
+ flutter_downloader:
+ :path: ".symlinks/plugins/flutter_downloader/ios"
+ just_audio:
+ :path: ".symlinks/plugins/just_audio/darwin"
+ package_info_plus:
+ :path: ".symlinks/plugins/package_info_plus/ios"
+ path_provider_foundation:
+ :path: ".symlinks/plugins/path_provider_foundation/darwin"
+ permission_handler_apple:
+ :path: ".symlinks/plugins/permission_handler_apple/ios"
+ share_plus:
+ :path: ".symlinks/plugins/share_plus/ios"
+ shared_preferences_foundation:
+ :path: ".symlinks/plugins/shared_preferences_foundation/darwin"
+ sqflite_darwin:
+ :path: ".symlinks/plugins/sqflite_darwin/darwin"
+ url_launcher_ios:
+ :path: ".symlinks/plugins/url_launcher_ios/ios"
+ webview_flutter_wkwebview:
+ :path: ".symlinks/plugins/webview_flutter_wkwebview/darwin"
+
+SPEC CHECKSUMS:
+ app_links: 76b66b60cc809390ca1ad69bfd66b998d2387ac7
+ audio_service: aa99a6ba2ae7565996015322b0bb024e1d25c6fd
+ audio_session: 9bb7f6c970f21241b19f5a3658097ae459681ba0
+ connectivity_plus: cb623214f4e1f6ef8fe7403d580fdad517d2f7dd
+ device_info_plus: 21fcca2080fbcd348be798aa36c3e5ed849eefbe
+ DKImagePickerController: 946cec48c7873164274ecc4624d19e3da4c1ef3c
+ DKPhotoGallery: b3834fecb755ee09a593d7c9e389d8b5d6deed60
+ file_picker: a0560bc09d61de87f12d246fc47d2119e6ef37be
+ Flutter: cabc95a1d2626b1b06e7179b784ebcf0c0cde467
+ flutter_downloader: 78da0da1084e709cbfd3b723c7ea349c71681f09
+ just_audio: 4e391f57b79cad2b0674030a00453ca5ce817eed
+ package_info_plus: af8e2ca6888548050f16fa2f1938db7b5a5df499
+ path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
+ permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d
+ SDWebImage: f29024626962457f3470184232766516dee8dfea
+ share_plus: 50da8cb520a8f0f65671c6c6a99b3617ed10a58a
+ shared_preferences_foundation: 9e1978ff2562383bd5676f64ec4e9aa8fa06a6f7
+ sqflite_darwin: 20b2a3a3b70e43edae938624ce550a3cbf66a3d0
+ SwiftyGif: 706c60cf65fa2bc5ee0313beece843c8eb8194d4
+ url_launcher_ios: 694010445543906933d732453a59da0a173ae33d
+ webview_flutter_wkwebview: 1821ceac936eba6f7984d89a9f3bcb4dea99ebb2
+
+PODFILE CHECKSUM: 0ab06865a10aced8dcbecd5fae08a60eea944bfe
+
+COCOAPODS: 1.16.2
diff --git a/mobile/ios/Runner.xcodeproj/project.pbxproj b/mobile/ios/Runner.xcodeproj/project.pbxproj
index c85d8d42..a3c5b47a 100644
--- a/mobile/ios/Runner.xcodeproj/project.pbxproj
+++ b/mobile/ios/Runner.xcodeproj/project.pbxproj
@@ -238,12 +238,12 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
- shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin";
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" embed_and_thin\n";
};
9740EEB61CF901F6004384FC /* Run Script */ = {
isa = PBXShellScriptBuildPhase;
alwaysOutOfDate = 1;
- buildActionMask = 2147483647;
+ buildActionMask = 12;
files = (
);
inputPaths = (
@@ -253,7 +253,7 @@
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
- shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
+ shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build\n";
};
B2A05C7B67BBE001F257F90D /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
@@ -365,7 +365,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = NO;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
@@ -382,7 +382,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "";
+ DEVELOPMENT_TEAM = 879LYRSYW9;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@@ -397,8 +397,8 @@
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
- MARKETING_VERSION = 1.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.placeholder;
+ MARKETING_VERSION = 1.1.2;
+ PRODUCT_BUNDLE_IDENTIFIER = com.gooseberrydevelopment.pinepods;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
@@ -454,7 +454,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
@@ -504,7 +504,7 @@
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
- IPHONEOS_DEPLOYMENT_TARGET = 12.0;
+ IPHONEOS_DEPLOYMENT_TARGET = 13.0;
MTL_ENABLE_DEBUG_INFO = NO;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
@@ -523,7 +523,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "";
+ DEVELOPMENT_TEAM = 879LYRSYW9;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@@ -538,8 +538,8 @@
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
- MARKETING_VERSION = 1.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.placeholder;
+ MARKETING_VERSION = 1.1.2;
+ PRODUCT_BUNDLE_IDENTIFIER = com.gooseberrydevelopment.pinepods;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
@@ -555,7 +555,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
CLANG_ENABLE_MODULES = YES;
CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)";
- DEVELOPMENT_TEAM = "";
+ DEVELOPMENT_TEAM = 879LYRSYW9;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
@@ -570,8 +570,8 @@
"$(inherited)",
"$(PROJECT_DIR)/Flutter",
);
- MARKETING_VERSION = 1.1.0;
- PRODUCT_BUNDLE_IDENTIFIER = com.placeholder;
+ MARKETING_VERSION = 1.1.2;
+ PRODUCT_BUNDLE_IDENTIFIER = com.gooseberrydevelopment.pinepods;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_OBJC_BRIDGING_HEADER = "Runner/Runner-Bridging-Header.h";
SWIFT_VERSION = 5.0;
diff --git a/mobile/ios/Runner/GeneratedPluginRegistrant.m b/mobile/ios/Runner/GeneratedPluginRegistrant.m
index 0ef72e8c..b5204fdb 100644
--- a/mobile/ios/Runner/GeneratedPluginRegistrant.m
+++ b/mobile/ios/Runner/GeneratedPluginRegistrant.m
@@ -54,6 +54,12 @@
@import just_audio;
#endif
+#if __has_include()
+#import
+#else
+@import package_info_plus;
+#endif
+
#if __has_include()
#import
#else
@@ -90,6 +96,12 @@
@import url_launcher_ios;
#endif
+#if __has_include()
+#import
+#else
+@import webview_flutter_wkwebview;
+#endif
+
@implementation GeneratedPluginRegistrant
+ (void)registerWithRegistry:(NSObject*)registry {
@@ -101,12 +113,14 @@ + (void)registerWithRegistry:(NSObject*)registry {
[FilePickerPlugin registerWithRegistrar:[registry registrarForPlugin:@"FilePickerPlugin"]];
[FlutterDownloaderPlugin registerWithRegistrar:[registry registrarForPlugin:@"FlutterDownloaderPlugin"]];
[JustAudioPlugin registerWithRegistrar:[registry registrarForPlugin:@"JustAudioPlugin"]];
+ [FPPPackageInfoPlusPlugin registerWithRegistrar:[registry registrarForPlugin:@"FPPPackageInfoPlusPlugin"]];
[PathProviderPlugin registerWithRegistrar:[registry registrarForPlugin:@"PathProviderPlugin"]];
[PermissionHandlerPlugin registerWithRegistrar:[registry registrarForPlugin:@"PermissionHandlerPlugin"]];
[FPPSharePlusPlugin registerWithRegistrar:[registry registrarForPlugin:@"FPPSharePlusPlugin"]];
[SharedPreferencesPlugin registerWithRegistrar:[registry registrarForPlugin:@"SharedPreferencesPlugin"]];
[SqflitePlugin registerWithRegistrar:[registry registrarForPlugin:@"SqflitePlugin"]];
[URLLauncherPlugin registerWithRegistrar:[registry registrarForPlugin:@"URLLauncherPlugin"]];
+ [WebViewFlutterPlugin registerWithRegistrar:[registry registrarForPlugin:@"WebViewFlutterPlugin"]];
}
@end
diff --git a/mobile/ios/Runner/Info.plist b/mobile/ios/Runner/Info.plist
index 1004ac91..aa88be53 100644
--- a/mobile/ios/Runner/Info.plist
+++ b/mobile/ios/Runner/Info.plist
@@ -34,13 +34,30 @@
pinepods-subscribe
+
+ CFBundleTypeRole
+ Viewer
+ CFBundleURLName
+ com.gooseberrydevelopment.pinepods.auth
+ CFBundleURLSchemes
+
+ pinepods
+
+
CFBundleVersion
$(FLUTTER_BUILD_NUMBER)
FDMaximumConcurrentTasks
1
+ FlutterDeepLinkingEnabled
+ YES
ITSAppUsesNonExemptEncryption
+ LSApplicationQueriesSchemes
+
+ https
+ http
+
LSRequiresIPhoneOS
NSAppTransportSecurity
@@ -77,13 +94,6 @@
UIViewControllerBasedStatusBarAppearance
- FlutterDeepLinkingEnabled
- NO
- LSApplicationQueriesSchemes
-
- https
- http
-
UTExportedTypeDeclarations
diff --git a/mobile/lib/api/podcast/mobile_podcast_api.dart b/mobile/lib/api/podcast/mobile_podcast_api.dart
index 9e3ce50b..aa67a3ce 100644
--- a/mobile/lib/api/podcast/mobile_podcast_api.dart
+++ b/mobile/lib/api/podcast/mobile_podcast_api.dart
@@ -9,6 +9,8 @@ import 'package:pinepods_mobile/core/environment.dart';
import 'package:pinepods_mobile/entities/transcript.dart';
import 'package:flutter/foundation.dart';
import 'package:podcast_search/podcast_search.dart' as podcast_search;
+import 'package:http/http.dart' as http;
+import 'package:html/parser.dart' as html;
/// An implementation of the [PodcastApi].
///
@@ -92,6 +94,11 @@ class MobilePodcastApi extends PodcastApi {
@override
Future loadTranscript(TranscriptUrl transcriptUrl) async {
+ // Handle HTML transcripts with custom parser
+ if (transcriptUrl.type == TranscriptFormat.html) {
+ return await _loadHtmlTranscript(transcriptUrl);
+ }
+
late podcast_search.TranscriptFormat format;
switch (transcriptUrl.type) {
@@ -101,6 +108,10 @@ class MobilePodcastApi extends PodcastApi {
case TranscriptFormat.json:
format = podcast_search.TranscriptFormat.json;
break;
+ case TranscriptFormat.html:
+ // This case is now handled above
+ format = podcast_search.TranscriptFormat.unsupported;
+ break;
case TranscriptFormat.unsupported:
format = podcast_search.TranscriptFormat.unsupported;
break;
@@ -125,6 +136,61 @@ class MobilePodcastApi extends PodcastApi {
}
}
+ /// Parse HTML transcript content into a transcript object
+ Future _loadHtmlTranscript(TranscriptUrl transcriptUrl) async {
+ try {
+ final response = await http.get(Uri.parse(transcriptUrl.url));
+
+ if (response.statusCode != 200) {
+ return podcast_search.Transcript();
+ }
+
+ final document = html.parse(response.body);
+ final subtitles = [];
+
+ // For HTML transcripts, find the main content area and render as a single block
+ String transcriptContent = '';
+
+ // Try to find the main transcript content area
+ final transcriptContainer = document.querySelector('.transcript, .content, main, article') ??
+ document.querySelector('body');
+
+ if (transcriptContainer != null) {
+ transcriptContent = transcriptContainer.innerHtml;
+
+ // Clean up common unwanted elements
+ final cleanDoc = html.parse(transcriptContent);
+
+ // Remove navigation, headers, footers, ads, etc.
+ for (final selector in ['nav', 'header', 'footer', '.nav', '.navigation', '.ads', '.advertisement', '.sidebar']) {
+ cleanDoc.querySelectorAll(selector).forEach((el) => el.remove());
+ }
+
+ transcriptContent = cleanDoc.body?.innerHtml ?? transcriptContent;
+
+ // Process markdown-style links [text](url) -> text
+ transcriptContent = transcriptContent.replaceAllMapped(
+ RegExp(r'\[([^\]]+)\]\(([^)]+)\)'),
+ (match) => '${match.group(1)}',
+ );
+
+ // Create a single subtitle entry for the entire HTML transcript
+ subtitles.add(podcast_search.Subtitle(
+ index: 0,
+ start: const Duration(seconds: 0),
+ end: const Duration(seconds: 1), // Minimal duration since timing doesn't matter
+ data: '{{HTMLFULL}}$transcriptContent',
+ speaker: '',
+ ));
+ }
+
+ return podcast_search.Transcript(subtitles: subtitles);
+ } catch (e) {
+ debugPrint('Error parsing HTML transcript: $e');
+ return podcast_search.Transcript();
+ }
+ }
+
static Future _search(Map searchParams) {
var term = searchParams['term']!;
var provider = searchParams['searchProvider'] == 'itunes'
diff --git a/mobile/lib/bloc/podcast/podcast_bloc.dart b/mobile/lib/bloc/podcast/podcast_bloc.dart
index 784735a7..35067b98 100644
--- a/mobile/lib/bloc/podcast/podcast_bloc.dart
+++ b/mobile/lib/bloc/podcast/podcast_bloc.dart
@@ -12,6 +12,8 @@ import 'package:pinepods_mobile/services/download/download_service.dart';
import 'package:pinepods_mobile/services/download/mobile_download_service.dart';
import 'package:pinepods_mobile/services/podcast/podcast_service.dart';
import 'package:pinepods_mobile/services/settings/settings_service.dart';
+import 'package:pinepods_mobile/services/pinepods/pinepods_service.dart';
+import 'package:pinepods_mobile/entities/pinepods_search.dart';
import 'package:pinepods_mobile/state/bloc_state.dart';
import 'package:collection/collection.dart' show IterableExtension;
import 'package:logging/logging.dart';
@@ -305,19 +307,65 @@ class PodcastBloc extends Bloc {
switch (event) {
case PodcastEvent.subscribe:
if (_podcast != null) {
+ // Emit loading state for subscription
+ _podcastStream.add(BlocLoadingState(_podcast));
+
+ // First, subscribe locally
_podcast = await podcastService.subscribe(_podcast!);
- _podcastStream.add(BlocPopulatedState(results: _podcast));
- _loadSubscriptions();
- _episodesStream.add(_podcast?.episodes);
+
+ // Check if we're in a PinePods environment and also add to server
+ if (_podcast != null) {
+ try {
+ final settings = settingsService.settings;
+ if (settings != null &&
+ settings.pinepodsServer != null &&
+ settings.pinepodsApiKey != null &&
+ settings.pinepodsUserId != null) {
+
+ // Also add to PinePods server
+ final pinepodsService = PinepodsService();
+ pinepodsService.setCredentials(settings.pinepodsServer!, settings.pinepodsApiKey!);
+
+ final unifiedPodcast = UnifiedPinepodsPodcast(
+ id: 0,
+ indexId: 0,
+ title: _podcast!.title,
+ url: _podcast!.url ?? '',
+ originalUrl: _podcast!.url ?? '',
+ link: _podcast!.link ?? '',
+ description: _podcast!.description ?? '',
+ author: _podcast!.copyright ?? '',
+ ownerName: _podcast!.copyright ?? '',
+ image: _podcast!.imageUrl ?? '',
+ artwork: _podcast!.imageUrl ?? '',
+ lastUpdateTime: 0,
+ explicit: false,
+ episodeCount: 0,
+ );
+
+ await pinepodsService.addPodcast(unifiedPodcast, settings.pinepodsUserId!);
+ log.fine('Added podcast to PinePods server');
+ }
+ } catch (e) {
+ log.warning('Failed to add podcast to PinePods server: $e');
+ // Continue with local subscription even if server add fails
+ }
+
+ _episodes = _podcast!.episodes;
+ _podcastStream.add(BlocPopulatedState(results: _podcast));
+ _loadSubscriptions();
+ _refresh(); // Use _refresh to apply filters and update episode stream properly
+ }
}
break;
case PodcastEvent.unsubscribe:
if (_podcast != null) {
await podcastService.unsubscribe(_podcast!);
_podcast!.id = null;
+ _episodes = _podcast!.episodes;
_podcastStream.add(BlocPopulatedState(results: _podcast));
_loadSubscriptions();
- _episodesStream.add(_podcast!.episodes);
+ _refresh(); // Use _refresh to apply filters and update episode stream properly
}
break;
case PodcastEvent.markAllPlayed:
diff --git a/mobile/lib/core/environment.dart b/mobile/lib/core/environment.dart
index 862a04b4..121ec0d6 100644
--- a/mobile/lib/core/environment.dart
+++ b/mobile/lib/core/environment.dart
@@ -22,7 +22,7 @@ const userAgentAppString = String.fromEnvironment(
/// Link to a feedback form. This will be shown in the main overflow menu if set
const feedbackUrl = String.fromEnvironment('FEEDBACK_URL', defaultValue: '');
-/// This class stores version information for Anytime, including project version and
+/// This class stores version information for PinePods, including project version and
/// build number. This is then used for user agent strings when interacting with
/// APIs and RSS feeds.
///
@@ -32,8 +32,8 @@ class Environment {
static const _applicationName = 'Pinepods';
static const _applicationUrl =
'https://github.com/madeofpendletonwool/pinepods';
- static const _projectVersion = '0.7.9';
- static const _build = '166';
+ static const _projectVersion = '0.8.1';
+ static const _build = '20252203';
static var _agentString = userAgentAppString;
diff --git a/mobile/lib/core/extensions.dart b/mobile/lib/core/extensions.dart
index a3b351ed..859621cf 100644
--- a/mobile/lib/core/extensions.dart
+++ b/mobile/lib/core/extensions.dart
@@ -33,6 +33,17 @@ extension ExtString on String? {
final url = Uri.tryParse(this!);
if (url == null || !url.isScheme('http')) return this!;
+
+ // Don't force HTTPS for localhost or local IP addresses to support self-hosted development
+ final host = url.host.toLowerCase();
+ if (host == 'localhost' ||
+ host == '127.0.0.1' ||
+ host.startsWith('10.') ||
+ host.startsWith('192.168.') ||
+ host.startsWith('172.') ||
+ host.endsWith('.local')) {
+ return this!;
+ }
return url.replace(scheme: 'https').toString();
}
diff --git a/mobile/lib/core/utils.dart b/mobile/lib/core/utils.dart
index b4e28295..e013f1cb 100644
--- a/mobile/lib/core/utils.dart
+++ b/mobile/lib/core/utils.dart
@@ -64,7 +64,7 @@ Future getStorageDirectory() async {
directory = await getApplicationSupportDirectory();
}
- return join(directory.path, 'AnyTime');
+ return join(directory.path, 'PinePods');
}
Future hasExternalStorage() async {
diff --git a/mobile/lib/entities/chapter.dart b/mobile/lib/entities/chapter.dart
index 48b7a842..0b093b9e 100644
--- a/mobile/lib/entities/chapter.dart
+++ b/mobile/lib/entities/chapter.dart
@@ -56,8 +56,8 @@ class Chapter {
imageUrl: chapter['imageUrl'] as String?,
url: chapter['url'] as String?,
toc: chapter['toc'] == 'false' ? false : true,
- startTime: double.parse(chapter['startTime'] as String),
- endTime: double.parse(chapter['endTime'] as String),
+ startTime: double.tryParse(chapter['startTime'] as String? ?? '0') ?? 0.0,
+ endTime: double.tryParse(chapter['endTime'] as String? ?? '0') ?? 0.0,
);
}
diff --git a/mobile/lib/entities/home_data.dart b/mobile/lib/entities/home_data.dart
index b7c5fc2e..452047f7 100644
--- a/mobile/lib/entities/home_data.dart
+++ b/mobile/lib/entities/home_data.dart
@@ -40,7 +40,7 @@ class HomePodcast {
podcastIndexId: json['podcastindexid'],
artworkUrl: json['artworkurl'],
author: json['author'],
- categories: json['categories'],
+ categories: _parseCategories(json['categories']),
description: json['description'],
episodeCount: json['episodecount'],
feedUrl: json['feedurl'],
@@ -51,6 +51,22 @@ class HomePodcast {
totalListenTime: json['total_listen_time'],
);
}
+
+ /// Parse categories from either string or Map format
+ static String? _parseCategories(dynamic categories) {
+ if (categories == null) return null;
+
+ if (categories is String) {
+ // Old format - return as is
+ return categories;
+ } else if (categories is Map) {
+ // New format - convert map values to comma-separated string
+ if (categories.isEmpty) return null;
+ return categories.values.join(', ');
+ }
+
+ return null;
+ }
}
class HomeEpisode {
diff --git a/mobile/lib/entities/pinepods_search.dart b/mobile/lib/entities/pinepods_search.dart
index 7e84fbaa..9ec80525 100644
--- a/mobile/lib/entities/pinepods_search.dart
+++ b/mobile/lib/entities/pinepods_search.dart
@@ -279,8 +279,8 @@ class UnifiedPinepodsPodcast {
factory UnifiedPinepodsPodcast.fromPodcast(PinepodsPodcast podcast) {
return UnifiedPinepodsPodcast(
- id: podcast.id,
- indexId: podcast.id,
+ id: 0, // Internal database ID - will be fetched when needed
+ indexId: podcast.id, // Podcast index ID
title: podcast.title,
url: podcast.url,
originalUrl: podcast.originalUrl,
diff --git a/mobile/lib/entities/search_providers.dart b/mobile/lib/entities/search_providers.dart
index 45725460..f34b0929 100644
--- a/mobile/lib/entities/search_providers.dart
+++ b/mobile/lib/entities/search_providers.dart
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-/// Anytime can support multiple search providers.
+/// PinePods can support multiple search providers.
///
/// This class represents a provider.
class SearchProvider {
diff --git a/mobile/lib/entities/transcript.dart b/mobile/lib/entities/transcript.dart
index 226bde6e..5ca85ffa 100644
--- a/mobile/lib/entities/transcript.dart
+++ b/mobile/lib/entities/transcript.dart
@@ -8,6 +8,7 @@ import 'package:flutter/foundation.dart';
enum TranscriptFormat {
json,
subrip,
+ html,
unsupported,
}
@@ -39,9 +40,12 @@ class TranscriptUrl {
case TranscriptFormat.json:
t = 1;
break;
- case TranscriptFormat.unsupported:
+ case TranscriptFormat.html:
t = 2;
break;
+ case TranscriptFormat.unsupported:
+ t = 3;
+ break;
}
return {
@@ -65,6 +69,9 @@ class TranscriptUrl {
t = TranscriptFormat.json;
break;
case 2:
+ t = TranscriptFormat.html;
+ break;
+ case 3:
t = TranscriptFormat.unsupported;
break;
}
diff --git a/mobile/lib/l10n/L.dart b/mobile/lib/l10n/L.dart
index c8ec0229..2f971297 100644
--- a/mobile/lib/l10n/L.dart
+++ b/mobile/lib/l10n/L.dart
@@ -10,8 +10,13 @@ import 'messages_all.dart';
class L {
L(this.localeName, this.overrides);
- static Future load(Locale locale, Map> overrides) {
- final name = locale.countryCode?.isEmpty ?? true ? locale.languageCode : locale.toString();
+ static Future load(
+ Locale locale,
+ Map> overrides,
+ ) {
+ final name = locale.countryCode?.isEmpty ?? true
+ ? locale.languageCode
+ : locale.toString();
final localeName = Intl.canonicalizedLocale(name);
return initializeMessages(localeName).then((_) {
@@ -28,10 +33,13 @@ class L {
/// Message definitions start here
String? message(String name) {
- if (overrides == null || overrides.isEmpty || !overrides.containsKey(name)) {
+ if (overrides == null ||
+ overrides.isEmpty ||
+ !overrides.containsKey(name)) {
return null;
} else {
- return overrides[name]![localeName] ?? 'Missing translation for $name and locale $localeName';
+ return overrides[name]![localeName] ??
+ 'Missing translation for $name and locale $localeName';
}
}
@@ -162,7 +170,8 @@ class L {
Intl.message(
'Search for podcasts',
name: 'search_for_podcasts_hint',
- desc: 'Hint displayed on search bar when the user clicks the search icon.',
+ desc:
+ 'Hint displayed on search bar when the user clicks the search icon.',
locale: localeName,
);
}
@@ -172,7 +181,8 @@ class L {
Intl.message(
'Head to Settings to Connect a Pinepods Server if you haven\'t yet!',
name: 'no_subscriptions_message',
- desc: 'Displayed on the library tab when the user has no subscriptions',
+ desc:
+ 'Displayed on the library tab when the user has no subscriptions',
locale: localeName,
);
}
@@ -222,7 +232,8 @@ class L {
Intl.message(
'Are you sure you wish to delete this episode?',
name: 'delete_episode_confirmation',
- desc: 'User is asked to confirm when they attempt to delete an episode',
+ desc:
+ 'User is asked to confirm when they attempt to delete an episode',
locale: localeName,
);
}
@@ -242,7 +253,8 @@ class L {
Intl.message(
'You do not have any downloaded episodes',
name: 'no_downloads_message',
- desc: 'Displayed on the library tab when the user has no subscriptions',
+ desc:
+ 'Displayed on the downloads tab when the user has no downloaded episodes',
locale: localeName,
);
}
@@ -252,7 +264,8 @@ class L {
Intl.message(
'No podcasts found',
name: 'no_search_results_message',
- desc: 'Displayed on the library tab when the user has no subscriptions',
+ desc:
+ 'Displayed on the library tab when the user has no subscriptions',
locale: localeName,
);
}
@@ -262,7 +275,8 @@ class L {
Intl.message(
'Could not load podcast episodes. Please check your connection.',
name: 'no_podcast_details_message',
- desc: 'Displayed on the podcast details page when the details could not be loaded',
+ desc:
+ 'Displayed on the podcast details page when the details could not be loaded',
locale: localeName,
);
}
@@ -412,7 +426,8 @@ class L {
Intl.message(
'Are you sure you wish to stop this download and delete the episode?',
name: 'stop_download_confirmation',
- desc: 'User is asked to confirm when they wish to stop the active download.',
+ desc:
+ 'User is asked to confirm when they wish to stop the active download.',
locale: localeName,
);
}
@@ -482,7 +497,8 @@ class L {
Intl.message(
'New downloads will be saved to internal storage. Existing downloads will remain on the SD card.',
name: 'settings_download_switch_internal',
- desc: 'Displayed when user switches from internal SD card to internal storage',
+ desc:
+ 'Displayed when user switches from internal SD card to internal storage',
locale: localeName,
);
}
@@ -580,7 +596,7 @@ class L {
String get consent_message {
return message('consent_message') ??
Intl.message(
- 'This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.',
+ 'This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by PinePods.',
name: 'consent_message',
desc: 'Display when first accessing external funding link',
locale: localeName,
@@ -632,7 +648,8 @@ class L {
Intl.message(
'Full screen player mode on episode start',
name: 'settings_auto_open_now_playing',
- desc: 'Displayed when user switches to use full screen player automatically',
+ desc:
+ 'Displayed when user switches to use full screen player automatically',
locale: localeName,
);
}
@@ -642,7 +659,8 @@ class L {
Intl.message(
'Unable to play episode. Please check your connection and try again.',
name: 'error_no_connection',
- desc: 'Displayed when attempting to start streaming an episode with no data connection',
+ desc:
+ 'Displayed when attempting to start streaming an episode with no data connection',
locale: localeName,
);
}
@@ -652,7 +670,8 @@ class L {
Intl.message(
'An unexpected error occurred during playback. Please check your connection and try again.',
name: 'error_playback_fail',
- desc: 'Displayed when attempting to start streaming an episode with no data connection',
+ desc:
+ 'Displayed when attempting to start streaming an episode with no data connection',
locale: localeName,
);
}
@@ -1100,7 +1119,7 @@ class L {
String get transcript_why_not_url {
return message('transcript_why_not_url') ??
Intl.message(
- 'https://anytimeplayer.app/docs/anytime_transcript_support_en.html',
+ 'https://www.pinepods.online/docs/Features/Transcript',
name: 'transcript_why_not_url',
desc: 'Language specific link',
locale: localeName,
@@ -1473,7 +1492,8 @@ class L {
Intl.message(
'No Episodes Found',
name: 'episode_filter_no_episodes_title_description',
- desc: 'This podcast has no episodes matching your search criteria and filter',
+ desc:
+ 'This podcast has no episodes matching your search criteria and filter',
locale: localeName,
);
}
@@ -1583,7 +1603,8 @@ class L {
Intl.message(
'Dismiss layout selector',
name: 'scrim_layout_selector',
- desc: 'Replaces default scrim label for layout selector bottom sheet.',
+ desc:
+ 'Replaces default scrim label for layout selector bottom sheet.',
locale: localeName,
);
}
@@ -1683,7 +1704,8 @@ class L {
Intl.message(
'Dismiss episode details',
name: 'scrim_episode_details_selector',
- desc: 'Replaces default scrim label for episode details bottom sheet.',
+ desc:
+ 'Replaces default scrim label for episode details bottom sheet.',
locale: localeName,
);
}
@@ -1719,17 +1741,18 @@ class L {
}
}
-class AnytimeLocalisationsDelegate extends LocalizationsDelegate {
- const AnytimeLocalisationsDelegate();
+class PinepodsLocalisationsDelegate extends LocalizationsDelegate {
+ const PinepodsLocalisationsDelegate();
@override
- bool isSupported(Locale locale) => ['en', 'de', 'it'].contains(locale.languageCode);
+ bool isSupported(Locale locale) =>
+ ['en', 'de', 'it'].contains(locale.languageCode);
@override
Future load(Locale locale) => L.load(locale, const {});
@override
- bool shouldReload(AnytimeLocalisationsDelegate old) => false;
+ bool shouldReload(PinepodsLocalisationsDelegate old) => false;
}
/// This class can be used by third-parties who wish to override or replace
@@ -1746,7 +1769,8 @@ class EmbeddedLocalisationsDelegate extends LocalizationsDelegate {
EmbeddedLocalisationsDelegate({@required this.messages = const {}});
@override
- bool isSupported(Locale locale) => ['en', 'de', 'it'].contains(locale.languageCode);
+ bool isSupported(Locale locale) =>
+ ['en', 'de', 'it'].contains(locale.languageCode);
@override
Future load(Locale locale) => L.load(locale, messages);
diff --git a/mobile/lib/l10n/intl_de.arb b/mobile/lib/l10n/intl_de.arb
index 4dacba59..55192f4c 100644
--- a/mobile/lib/l10n/intl_de.arb
+++ b/mobile/lib/l10n/intl_de.arb
@@ -1,6 +1,6 @@
{
"@@last_modified": "2020-02-20T10:40:43.008209",
- "app_title": "Anytime Podcast Player",
+ "app_title": "PinePods Podcast Player",
"@app_title": {
"description": "Full title for the application",
"type": "text",
@@ -324,7 +324,7 @@
"type": "text",
"placeholders": {}
},
- "consent_message": "Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. Links werden von den Podcast-Autoren bereitgestellt und nicht von Anytime kontrolliert.",
+ "consent_message": "Über diesen Finanzierungslink gelangen Sie zu einer externen Website, auf der Sie die Show direkt unterstützen können. Links werden von den Podcast-Autoren bereitgestellt und nicht von PinePods kontrolliert.",
"@consent_message": {
"description": "Display when first accessing external funding link",
"type": "text",
@@ -657,7 +657,7 @@
"placeholders_order": [],
"placeholders": {}
},
- "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_de.html",
+ "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript",
"@transcript_why_not_url": {
"description": "Language specific link",
"type": "text",
@@ -1065,4 +1065,4 @@
"type": "text",
"placeholders": {}
}
-}
\ No newline at end of file
+}
diff --git a/mobile/lib/l10n/intl_en.arb b/mobile/lib/l10n/intl_en.arb
index fbcd5919..3b2ba58e 100644
--- a/mobile/lib/l10n/intl_en.arb
+++ b/mobile/lib/l10n/intl_en.arb
@@ -1,7 +1,7 @@
{
"@@last_modified": "2020-02-20T12:15:52.645497",
"@@locale": "en",
- "app_title": "Anytime Podcast Player",
+ "app_title": "PinePods Podcast Player",
"@app_title": {
"description": "Full title for the application",
"type": "text",
@@ -325,7 +325,7 @@
"type": "text",
"placeholders": {}
},
- "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.",
+ "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by PinePods.",
"@consent_message": {
"description": "Display when first accessing external funding link",
"type": "text",
@@ -469,7 +469,7 @@
"type": "text",
"placeholders": {}
},
- "settings_personalisation_divider_label": "PERSONALISATION",
+ "settings_personalisation_divider_label": "Personalisation",
"@settings_personalisation_divider_label": {
"description": "Settings divider label for personalisation",
"type": "text",
@@ -481,7 +481,7 @@
"type": "text",
"placeholders": {}
},
- "settings_playback_divider_label": "PLAYBACK",
+ "settings_playback_divider_label": "Playback",
"@settings_playback_divider_label": {
"description": "Settings divider label for playback",
"type": "text",
@@ -658,7 +658,7 @@
"placeholders_order": [],
"placeholders": {}
},
- "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html",
+ "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript",
"@transcript_why_not_url": {
"description": "Language specific link",
"type": "text",
@@ -1066,4 +1066,4 @@
"type": "text",
"placeholders": {}
}
-}
\ No newline at end of file
+}
diff --git a/mobile/lib/l10n/intl_it.arb b/mobile/lib/l10n/intl_it.arb
index a50bec33..74cf7af2 100644
--- a/mobile/lib/l10n/intl_it.arb
+++ b/mobile/lib/l10n/intl_it.arb
@@ -1,6 +1,6 @@
{
"@@last_modified": "2024-04-09T17:34:52.645497",
- "app_title": "Anytime Podcast Player",
+ "app_title": "PinePods Podcast Player",
"@app_title": {
"description": "Full title for the application",
"type": "text",
@@ -324,7 +324,7 @@
"type": "text",
"placeholders": {}
},
- "consent_message": "Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. I link sono forniti dagli autori del podcast e non sono verificati da Anytime.",
+ "consent_message": "Questo link per la ricerca fondi ti porterà a un sito esterno dove avrai la possibilità di supportare direttamente questo show. I link sono forniti dagli autori del podcast e non sono verificati da PinePods.",
"@consent_message": {
"description": "Display when first accessing external funding link",
"type": "text",
@@ -657,7 +657,7 @@
"placeholders_order": [],
"placeholders": {}
},
- "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html",
+ "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript",
"@transcript_why_not_url": {
"description": "Language specific link",
"type": "text",
@@ -1065,4 +1065,4 @@
"type": "text",
"placeholders": {}
}
-}
\ No newline at end of file
+}
diff --git a/mobile/lib/l10n/intl_messages.arb b/mobile/lib/l10n/intl_messages.arb
index e24bdc92..564f2df7 100644
--- a/mobile/lib/l10n/intl_messages.arb
+++ b/mobile/lib/l10n/intl_messages.arb
@@ -325,7 +325,7 @@
"type": "text",
"placeholders": {}
},
- "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by Anytime.",
+ "consent_message": "This funding link will take you to an external site where you will be able to directly support the show. Links are provided by the podcast authors and is not controlled by PinePods.",
"@consent_message": {
"description": "Display when first accessing external funding link",
"type": "text",
@@ -469,7 +469,7 @@
"type": "text",
"placeholders": {}
},
- "settings_personalisation_divider_label": "PERSONALISATION",
+ "settings_personalisation_divider_label": "Personalisation",
"@settings_personalisation_divider_label": {
"description": "Settings divider label for personalisation",
"type": "text",
@@ -481,7 +481,7 @@
"type": "text",
"placeholders": {}
},
- "settings_playback_divider_label": "PLAYBACK",
+ "settings_playback_divider_label": "Playback",
"@settings_playback_divider_label": {
"description": "Settings divider label for playback",
"type": "text",
@@ -637,7 +637,7 @@
"type": "text",
"placeholders": {}
},
- "transcript_why_not_url": "https://anytimeplayer.app/docs/anytime_transcript_support_en.html",
+ "transcript_why_not_url": "https://www.pinepods.online/docs/Features/Transcript",
"@transcript_why_not_url": {
"description": "Language specific link",
"type": "text",
@@ -1011,4 +1011,4 @@
"type": "text",
"placeholders": {}
}
-}
\ No newline at end of file
+}
diff --git a/mobile/lib/l10n/messages_de.dart b/mobile/lib/l10n/messages_de.dart
index 265c2d71..dffa80e7 100644
--- a/mobile/lib/l10n/messages_de.dart
+++ b/mobile/lib/l10n/messages_de.dart
@@ -14,8 +14,7 @@ import 'package:intl/message_lookup_by_library.dart';
final messages = MessageLookup();
-typedef String? MessageIfAbsent(
- String? messageStr, List