diff --git a/.github/workflows/PR-pipeline.yml b/.github/workflows/PR-pipeline.yml new file mode 100644 index 000000000..cc3d98cd9 --- /dev/null +++ b/.github/workflows/PR-pipeline.yml @@ -0,0 +1,435 @@ +name: run tests on pull request + +on: + pull_request: + +# Performance: Cancel outdated workflow runs +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# Security: Set minimal default permissions for all jobs +permissions: + contents: read # All jobs can read code, but can't write by default + +jobs: + lint-and-build: + name: Lint and Build + runs-on: ubuntu-latest + permissions: + contents: read # Read code + pull-requests: write # Post lint comments + checks: write # Create check runs + statuses: write # Update check status + + steps: + - name: Check out Git repository + uses: actions/checkout@v5 + with: + fetch-depth: 1 + + - name: Set up Node.js + uses: actions/setup-node@v6 + with: + node-version: 22.18.0 + cache: 'npm' + + # Performance: Cache node_modules (shared with test jobs) + - name: Cache node_modules + id: node-cache + uses: actions/cache@v4.3.0 + with: + path: node_modules + key: node-modules-${{ runner.os }}-${{ hashFiles('package-lock.json') }} + restore-keys: | + node-modules-${{ runner.os }}- + + - name: Install Node.js dependencies + if: steps.node-cache.outputs.cache-hit != 'true' + run: npm ci + + - name: Run linters + uses: wearerequired/lint-action@v2.3.0 + with: + eslint: true + #prettier: true + continue_on_error: true + + # Build entire project INCLUDING tests (compile once, use 6 times!) + - name: Build project and tests + run: | + echo "๐Ÿ”จ Building TypeScript (src + tests)..." 
+ npm run build:ci + echo "โœ… Build complete" + + # Upload compiled code for test jobs to use + - name: Upload compiled build + uses: actions/upload-artifact@v5 + with: + name: compiled-build + path: build/ + retention-days: 1 + + # Separate job: Verify migrations work (optional, can fail independently) + migration-check: + name: Migration Verification + runs-on: ubuntu-latest + needs: lint-and-build + + # Security: Explicit minimal permissions + permissions: + contents: read + + services: + postgres: + image: ghcr.io/giveth/postgres-givethio:latest + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: givethio + PGDATA: /var/lib/postgresql/data/pgdata + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5443:5432 + + steps: + - name: Checkout + uses: actions/checkout@v5 + with: + fetch-depth: 1 + + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v5.1.0 + with: + aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} + aws-region: ${{ secrets.AWS_S3_REGION }} + mask-aws-account-id: true # Security: Hide account ID in logs + + # Performance: Get current week for cache key (caches DB backup for the week) + - name: Get current week + id: date + run: | + # Get ISO week number (e.g., 2025-W44) + WEEK=$(date +'%Y-W%V') + echo "week=$WEEK" >> $GITHUB_OUTPUT + echo "Cache key will be: db-backup-staging-$WEEK" + + # Performance: Cache database backup to avoid repeated downloads + # Cache is shared across all runs in the same week (7 days) + - name: Cache database backup + id: cache-db + uses: actions/cache@v4.3.0 + with: + path: /tmp/db_backup.zip + # Cache key: week-based so all runs same week share cache + # Format: db-backup-staging-2025-W44 (changes every Monday) + key: db-backup-staging-${{ steps.date.outputs.week }} + restore-keys: | + db-backup-staging- + + # Only download if cache 
miss + - name: Download latest DB backup from S3 + if: steps.cache-db.outputs.cache-hit != 'true' + run: | + set -e + echo "๐Ÿ“ฅ Cache miss - downloading fresh backup from S3..." + FILENAME=$(aws s3 ls ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/ | sort | tail -n 1 | awk '{print $4}') + if [ -z "$FILENAME" ]; then + echo "Error: No backup file found in S3" + exit 1 + fi + echo "Downloading backup: $FILENAME" + aws s3 cp ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/$FILENAME /tmp/db_backup.zip + echo "โœ… Download complete: $(du -h /tmp/db_backup.zip | cut -f1)" + + - name: Verify cached backup exists + if: steps.cache-db.outputs.cache-hit == 'true' + run: | + echo "โœ… Cache hit! Using cached database backup" + echo "Cached file size: $(du -h /tmp/db_backup.zip | cut -f1)" + + - name: Unzip and validate DB backup + run: | + set -e + echo "๐Ÿ“ฆ Extracting database backup..." + unzip -q /tmp/db_backup.zip -d /tmp + + # Security: Validate the backup file exists and is readable + SQL_FILE=$(find /tmp/backups/givethio-staging/*.sql -type f -print -quit 2>/dev/null) + if [ ! -f "$SQL_FILE" ]; then + echo "Error: SQL backup file not found" + exit 1 + fi + + mv "$SQL_FILE" /tmp/backups/givethio-staging/db_backup.sql + echo "โœ… Database backup prepared: $(du -h /tmp/backups/givethio-staging/db_backup.sql | cut -f1)" + + - name: Wait for PostgreSQL to become ready + run: | + for i in {1..10}; do + if pg_isready -h localhost -p 5443 -U postgres; then + echo "โœ… PostgreSQL is ready" + break + fi + echo -n . + sleep 1 + done + # Verify database is actually ready + if ! 
pg_isready -h localhost -p 5443 -U postgres; then + echo "โŒ PostgreSQL failed to become ready after 10 attempts" + exit 1 + fi + + - name: Use Node.js + uses: actions/setup-node@v6 + with: + node-version: 22.18.0 + cache: 'npm' + + # Performance: Cache node_modules (shared with all jobs) + - name: Cache node_modules + id: node-cache + uses: actions/cache@v4.3.0 + with: + path: node_modules + key: node-modules-${{ runner.os }}-${{ hashFiles('package-lock.json') }} + restore-keys: | + node-modules-${{ runner.os }}- + + - name: Install dependencies + if: steps.node-cache.outputs.cache-hit != 'true' + run: npm ci + + # Performance: Check if we have a cached post-migration database state FIRST + - name: Get migration checksum + id: migration-checksum + run: | + # Create checksum of all migration files to detect changes + CHECKSUM=$(find migration -name "*.ts" -type f -exec sha256sum {} \; | sort | sha256sum | cut -d' ' -f1) + echo "checksum=$CHECKSUM" >> $GITHUB_OUTPUT + echo "Migration checksum: $CHECKSUM" + + - name: Cache post-migration database state + id: cache-migrated-db + uses: actions/cache@v4.3.0 + with: + path: /tmp/migrated_db_dump.sql + key: migrated-db-${{ steps.date.outputs.week }}-${{ steps.migration-checksum.outputs.checksum }} + restore-keys: | + migrated-db-${{ steps.date.outputs.week }}- + + # Only restore initial backup if we DON'T have cached migrated DB + - name: Restore initial DB backup + if: steps.cache-migrated-db.outputs.cache-hit != 'true' + run: | + set -e + echo "๐Ÿ“ฆ Restoring initial database backup..." + PGPASSWORD=postgres psql -h localhost -p 5443 -U postgres -d givethio \ + < /tmp/backups/givethio-staging/db_backup.sql + echo "โœ… Initial database restored" + + # Restore from cached migrated database if available (SKIP initial restore) + - name: Restore cached post-migration database + if: steps.cache-migrated-db.outputs.cache-hit == 'true' + run: | + set -e + echo "โšก Restoring from cached post-migration database..." 
+ PGPASSWORD=postgres psql -h localhost -p 5443 -U postgres -d givethio \ + --set ON_ERROR_STOP=on < /tmp/migrated_db_dump.sql + echo "โœ… Cached database restored (skipped migrations!)" + + # Only run migrations if no cached migrated DB + - name: Run migrations + if: steps.cache-migrated-db.outputs.cache-hit != 'true' + run: | + echo "๐Ÿ”„ Running migrations (no cache)..." + npm run db:migrate:run:test + echo "โœ… Migrations complete" + + # Create dump of migrated database for caching + - name: Create post-migration database dump + if: steps.cache-migrated-db.outputs.cache-hit != 'true' + run: | + echo "๐Ÿ’พ Creating post-migration database dump for caching..." + PGPASSWORD=postgres pg_dump -h localhost -p 5443 -U postgres -d givethio \ + --no-owner --no-acl > /tmp/migrated_db_dump.sql + + # Verify dump was created and show size + if [ ! -f /tmp/migrated_db_dump.sql ]; then + echo "โŒ ERROR: Dump file was not created!" + exit 1 + fi + + DUMP_SIZE=$(du -h /tmp/migrated_db_dump.sql | cut -f1) + DUMP_SIZE_MB=$(du -m /tmp/migrated_db_dump.sql | cut -f1) + echo "โœ… Dump created: $DUMP_SIZE ($DUMP_SIZE_MB MB)" + + # Warn if dump is very large + if [ $DUMP_SIZE_MB -gt 2000 ]; then + echo "โš ๏ธ Warning: Dump is large (${DUMP_SIZE_MB}MB), caching may be slow" + fi + + - name: Verify migrated database dump exists + run: | + if [ -f /tmp/migrated_db_dump.sql ]; then + echo "โœ… Migrated database dump ready for test jobs" + ls -lh /tmp/migrated_db_dump.sql + else + echo "โš ๏ธ Warning: Dump file not found, test jobs will need to migrate" + fi + + test: + name: Integration Tests - ${{ matrix.test-group-name }} + runs-on: ubuntu-latest + needs: lint-and-build # Tests don't need migration-check to complete! 
+ + # Performance: Run test groups in parallel + strategy: + fail-fast: false # Don't cancel other groups if one fails + matrix: + include: + - test-group-name: "Resolvers" + test-pattern: "src/resolvers/**/*.test.ts" + - test-group-name: "Repositories" + test-pattern: "src/repositories/**/*.test.ts" + - test-group-name: "Services" + test-pattern: "src/services/**/*.test.ts" + - test-group-name: "Server & Routes" + test-pattern: "src/server/**/*.test.ts src/routers/**/*.test.ts src/user/**/*.test.ts" + - test-group-name: "Entities & Utils" + test-pattern: "src/entities/**/*.test.ts src/utils/**/*.test.ts src/adapters/**/*.test.ts" + - test-group-name: "Workers & Migrations" + test-pattern: "src/workers/**/*.test.ts migration/tests/*.test.ts" + + # Security: Explicit minimal permissions + permissions: + contents: read # Read code + pull-requests: write # Comment test results + checks: write # Update check runs + + services: + # Label used to access the service container + redis: + # Docker Hub image - Security: Consider using specific version tag + image: redis:7.4-alpine # Performance: Alpine is lighter, Security: specific version + # Set health checks to wait until redis has started + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + postgres: + image: ghcr.io/giveth/postgres-givethio:latest + env: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: givethio + PGDATA: /var/lib/postgresql/data/pgdata + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5443:5432 + + steps: + - name: Checkout + uses: actions/checkout@v5 + with: + fetch-depth: 1 + + - name: Use Node.js + uses: actions/setup-node@v6 + with: + node-version: 22.18.0 + cache: 'npm' + + # Performance: Cache node_modules (shared from lint-and-build) + - name: Cache node_modules + id: node-cache + uses: actions/cache@v4.3.0 + with: + 
path: node_modules + key: node-modules-${{ runner.os }}-${{ hashFiles('package-lock.json') }} + restore-keys: | + node-modules-${{ runner.os }}- + + - name: Install dependencies + if: steps.node-cache.outputs.cache-hit != 'true' + run: npm ci + + # Performance: Download pre-compiled build from lint-and-build job + - name: Download compiled build + uses: actions/download-artifact@v6 + with: + name: compiled-build + path: build/ + + - name: Wait for services to become ready + run: | + echo "โณ Waiting for PostgreSQL and Redis..." + for i in {1..30}; do + if pg_isready -h localhost -p 5443 -U postgres > /dev/null 2>&1; then + echo "โœ… PostgreSQL is ready!" + break + fi + echo -n "." + sleep 1 + done + + # Redis should be ready almost immediately + if redis-cli -h localhost ping > /dev/null 2>&1; then + echo "โœ… Redis is ready!" + fi + + # Performance: Run pre-compiled JavaScript tests (much faster!) + - name: Run tests - ${{ matrix.test-group-name }} + run: | + echo "๐Ÿงช Running test group: ${{ matrix.test-group-name }}" + echo "๐Ÿ“ Test pattern: ${{ matrix.test-pattern }}" + + # Convert TypeScript patterns to JavaScript patterns + TEST_PATTERN=$(echo "${{ matrix.test-pattern }}" | sed 's/\.ts/.js/g') + echo "๐Ÿ“ฆ Compiled test pattern: $TEST_PATTERN" + + # Run compiled JavaScript tests (no ts-node overhead!) 
+ NODE_ENV=test npx mocha \ + --exit \ + --retries 2 \ + ./build/test/pre-test-scripts.js \ + $(echo "$TEST_PATTERN" | sed -e 's|src/|build/src/|g' -e 's|migration/|build/migration/|g') + env: + # Performance: Optimize Node.js memory for testing + NODE_OPTIONS: "--max-old-space-size=4096" + ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }} + XDAI_NODE_HTTP_URL: ${{ secrets.XDAI_NODE_HTTP_URL }} + INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} + INFURA_ID: ${{ secrets.INFURA_ID }} + POLYGON_SCAN_API_KEY: ${{ secrets.POLYGON_SCAN_API_KEY }} + OPTIMISTIC_SCAN_API_KEY: ${{ secrets.OPTIMISTIC_SCAN_API_KEY }} + CELO_SCAN_API_KEY: ${{ secrets.CELO_SCAN_API_KEY }} + CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }} + ARBITRUM_SCAN_API_KEY: ${{ secrets.ARBITRUM_SCAN_API_KEY }} + ARBITRUM_SEPOLIA_SCAN_API_KEY: ${{ secrets.ARBITRUM_SEPOLIA_SCAN_API_KEY }} + BASE_SCAN_API_KEY: ${{ secrets.BASE_SCAN_API_KEY }} + BASE_SEPOLIA_SCAN_API_KEY: ${{ secrets.BASE_SEPOLIA_SCAN_API_KEY }} + ZKEVM_MAINNET_SCAN_API_KEY: ${{ secrets.ZKEVM_MAINNET_SCAN_API_KEY }} + ZKEVM_CARDONA_SCAN_API_KEY: ${{ secrets.ZKEVM_CARDONA_SCAN_API_KEY }} + MORDOR_ETC_TESTNET: ${{ secrets.MORDOR_ETC_TESTNET }} + ETC_NODE_HTTP_URL: ${{ secrets.ETC_NODE_HTTP_URL }} + DROP_DATABASE: ${{ secrets.DROP_DATABASE_DURING_TEST_STAGING }} + SOLANA_TEST_NODE_RPC_URL: ${{ secrets.SOLANA_TEST_NODE_RPC_URL }} + SOLANA_DEVNET_NODE_RPC_URL: ${{ secrets.SOLANA_DEVNET_NODE_RPC_URL }} + SOLANA_MAINNET_NODE_RPC_URL: ${{ secrets.SOLANA_MAINNET_NODE_RPC_URL }} + MPETH_GRAPHQL_PRICES_URL: ${{ secrets.MPETH_GRAPHQL_PRICES_URL }} + GIV_POWER_SUBGRAPH_URL: ${{ secrets.GIV_POWER_SUBGRAPH_URL }} + VERIFY_RIGHT_URL: ${{ secrets.VERIFY_RIGHT_URL }} + VERIFY_RIGHT_TOKEN: ${{ secrets.VERIFY_RIGHT_TOKEN }} diff --git a/.github/workflows/master-pipeline.yml b/.github/workflows/master-pipeline.yml index 3ddf35cc0..a207ae3ca 100644 --- a/.github/workflows/master-pipeline.yml +++ b/.github/workflows/master-pipeline.yml @@ -4,9 +4,6 @@ on: push: branches: - master - 
pull_request: - branches: - - master jobs: run-linters: diff --git a/.github/workflows/run-tests-on-pr.yml.bck b/.github/workflows/run-tests-on-pr.yml.bck deleted file mode 100644 index c23bd1039..000000000 --- a/.github/workflows/run-tests-on-pr.yml.bck +++ /dev/null @@ -1,73 +0,0 @@ -name: run tests on pull request - -on: - workflow_dispatch: - pull_request: - branches: - - develop - - master - - staging - types: - - opened - -jobs: - test: - runs-on: ubuntu-latest - services: - # Label used to access the service container - redis: - # Docker Hub image - image: redis - # Set health checks to wait until redis has started - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 6379:6379 - postgres: - # Use this postgres image https://github.com/Giveth/postgres-givethio - image: ghcr.io/giveth/postgres-givethio:latest - env: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: givethio - PGDATA: /var/lib/postgresql/data/pgdata - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5443:5432 - steps: - - uses: actions/checkout@v1 - - name: Use Node.js - uses: actions/setup-node@v1 - with: - node-version: 16.14.2 - - name: Install dependencies - run: npm ci - - name: Run eslint - run: npm run eslint - - name: Run build - run: npm run build - - name: Run migrations - run: npm run db:migrate:run:test - - name: Run tests - run: npm run test - env: - ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }} - XDAI_NODE_HTTP_URL: ${{ secrets.XDAI_NODE_HTTP_URL }} - INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} - INFURA_ID: ${{ secrets.INFURA_ID }} - POLYGON_SCAN_API_KEY: ${{ secrets.POLYGON_SCAN_API_KEY }} - OPTIMISTIC_SCAN_API_KEY: ${{ secrets.OPTIMISTIC_SCAN_API_KEY }} - CELO_SCAN_API_KEY: ${{ secrets.CELO_SCAN_API_KEY }} - CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }} - ARBITRUM_SCAN_API_KEY: ${{ 
secrets.ARBITRUM_SCAN_API_KEY }} - ARBITRUM_SEPOLIA_SCAN_API_KEY: ${{ secrets.ARBITRUM_SEPOLIA_SCAN_API_KEY }} - MPETH_GRAPHQL_PRICES_URL: ${{ secrets.MPETH_GRAPHQL_PRICES_URL }} - VERIFY_RIGHT_URL: ${{ secrets.VERIFY_RIGHT_URL }} - VERIFY_RIGHT_TOKEN: ${{ secrets.VERIFY_RIGHT_TOKEN }} diff --git a/.github/workflows/staging-pipeline.yml b/.github/workflows/staging-pipeline.yml index 6f2a8030f..1b2dc21f1 100644 --- a/.github/workflows/staging-pipeline.yml +++ b/.github/workflows/staging-pipeline.yml @@ -4,178 +4,119 @@ on: push: branches: - staging - pull_request: - branches: - - staging + +# Security: Set minimal default permissions for all jobs +permissions: + contents: read # All jobs can read code, but can't write by default jobs: - run-linters: - name: Run linters + linters-and-build: + name: Run linters and build runs-on: ubuntu-latest + permissions: + contents: read # Read code + pull-requests: write # Post lint comments + statuses: write # Update check status steps: - name: Check out Git repository uses: actions/checkout@v5 with: - fetch-depth: 0 + fetch-depth: 1 - name: Set up Node.js - uses: actions/setup-node@v5 + uses: actions/setup-node@v6 with: - node-version: 20.11.0 + node-version: 22.18.0 + cache: 'npm' # ESLint and Prettier must be in `package.json` - name: Install Node.js dependencies run: npm ci - name: Run linters - uses: wearerequired/lint-action@v2 + uses: wearerequired/lint-action@v2.3.0 with: eslint: true #prettier: true continue_on_error: true - test: - runs-on: ubuntu-latest - needs: run-linters - services: - # Label used to access the service container - redis: - # Docker Hub image - image: redis - # Set health checks to wait until redis has started - options: >- - --health-cmd "redis-cli ping" - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 6379:6379 - postgres: - # Use this postgres image https://github.com/Giveth/postgres-givethio - image: ghcr.io/giveth/postgres-givethio:latest - env: - POSTGRES_USER: 
postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: givethio - PGDATA: /var/lib/postgresql/data/pgdata - options: >- - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - ports: - - 5443:5432 - - steps: - - name: Checkout - uses: actions/checkout@v5 - with: - fetch-depth: 0 - - name: Configure AWS credentials - uses: aws-actions/configure-aws-credentials@v2 - with: - aws-access-key-id: ${{ secrets.AWS_S3_ACCESS_KEY_ID }} - aws-secret-access-key: ${{ secrets.AWS_S3_SECRET_ACCESS_KEY }} - aws-region: ${{ secrets.AWS_S3_REGION }} - - - name: Download latest DB backup from S3 + - name: Build project run: | - FILENAME=$(aws s3 ls ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/ | sort | tail -n 1 | awk '{print $4}') - aws s3 cp ${{ secrets.AWS_S3_BUCKET_PATH_STAGING }}/$FILENAME /tmp/db_backup.zip - - - name: Unzip DB backup - run: | - unzip /tmp/db_backup.zip -d /tmp - mv /tmp/backups/givethio-staging/*.sql /tmp/backups/givethio-staging/db_backup.sql - - - name: Wait for PostgreSQL to become ready - run: | - for i in {1..10} - do - pg_isready -h localhost -p 5443 -U postgres && echo Success && break - echo -n . 
- sleep 1 - done - - - name: Restore DB backup - run: PGPASSWORD=postgres psql -h localhost -p 5443 -U postgres -d givethio < /tmp/backups/givethio-staging/db_backup.sql - - - name: Use Node.js - uses: actions/setup-node@v1 - with: - node-version: 20.11.0 - - - name: Install dependencies - run: npm ci - - - name: Run eslint - run: npm run eslint - - - name: Run build - run: npm run build - - - name: Run migrations - run: npm run db:migrate:run:test - - - name: Run tests - run: npm run test - env: - ETHERSCAN_API_KEY: ${{ secrets.ETHERSCAN_API_KEY }} - XDAI_NODE_HTTP_URL: ${{ secrets.XDAI_NODE_HTTP_URL }} - INFURA_API_KEY: ${{ secrets.INFURA_API_KEY }} - INFURA_ID: ${{ secrets.INFURA_ID }} - POLYGON_SCAN_API_KEY: ${{ secrets.POLYGON_SCAN_API_KEY }} - OPTIMISTIC_SCAN_API_KEY: ${{ secrets.OPTIMISTIC_SCAN_API_KEY }} - CELO_SCAN_API_KEY: ${{ secrets.CELO_SCAN_API_KEY }} - CELO_ALFAJORES_SCAN_API_KEY: ${{ secrets.CELO_ALFAJORES_SCAN_API_KEY }} - ARBITRUM_SCAN_API_KEY: ${{ secrets.ARBITRUM_SCAN_API_KEY }} - ARBITRUM_SEPOLIA_SCAN_API_KEY: ${{ secrets.ARBITRUM_SEPOLIA_SCAN_API_KEY }} - BASE_SCAN_API_KEY: ${{ secrets.BASE_SCAN_API_KEY }} - BASE_SEPOLIA_SCAN_API_KEY: ${{ secrets.BASE_SEPOLIA_SCAN_API_KEY }} - ZKEVM_MAINNET_SCAN_API_KEY: ${{ secrets.ZKEVM_MAINNET_SCAN_API_KEY }} - ZKEVM_CARDONA_SCAN_API_KEY: ${{ secrets.ZKEVM_CARDONA_SCAN_API_KEY }} - MORDOR_ETC_TESTNET: ${{ secrets.MORDOR_ETC_TESTNET }} - ETC_NODE_HTTP_URL: ${{ secrets.ETC_NODE_HTTP_URL }} - DROP_DATABASE: ${{ secrets.DROP_DATABASE_DURING_TEST_STAGING }} - SOLANA_TEST_NODE_RPC_URL: ${{ secrets.SOLANA_TEST_NODE_RPC_URL }} - SOLANA_DEVNET_NODE_RPC_URL: ${{ secrets.SOLANA_DEVNET_NODE_RPC_URL }} - SOLANA_MAINNET_NODE_RPC_URL: ${{ secrets.SOLANA_MAINNET_NODE_RPC_URL }} - MPETH_GRAPHQL_PRICES_URL: ${{ secrets.MPETH_GRAPHQL_PRICES_URL }} - GIV_POWER_SUBGRAPH_URL: ${{ secrets.GIV_POWER_SUBGRAPH_URL }} - VERIFY_RIGHT_URL: ${{ secrets.VERIFY_RIGHT_URL }} - VERIFY_RIGHT_TOKEN: ${{ secrets.VERIFY_RIGHT_TOKEN }} + echo 
"๐Ÿ”จ Building TypeScript..." + npm run build + echo "โœ… Build complete" publish: - needs: test + needs: linters-and-build runs-on: ubuntu-latest - if: github.event_name == 'push' permissions: - contents: read - packages: write + contents: read # Read code + packages: write # Push to GHCR + id-token: write # Cosign signing + outputs: + image-digest: ${{ steps.build.outputs.digest }} steps: - name: Check out the repo uses: actions/checkout@v5 with: - fetch-depth: 0 + fetch-depth: 1 + - name: Login to GitHub Container Registry - run: echo ${{ secrets.GITHUB_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin + uses: docker/login-action@v3.6.0 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3.11.1 + + # Performance: Build with cache for faster builds - name: Build image and push to GitHub Packages - uses: docker/build-push-action@v6 + id: build + uses: docker/build-push-action@v6.18.0 with: + context: . push: true - no-cache: true - repository: giveth/impact-graph tags: | ghcr.io/giveth/impact-graph:staging ghcr.io/giveth/impact-graph:${{ github.sha }} + cache-from: type=gha + cache-to: type=gha,mode=max + provenance: true + sbom: true + + # Security: Sign container image with Cosign + - name: Install Cosign + uses: sigstore/cosign-installer@v4.0.0 + + - name: Sign container image + run: | + cosign sign --yes ghcr.io/giveth/impact-graph@${{ steps.build.outputs.digest }} deploy: needs: publish runs-on: ubuntu-latest + permissions: + contents: read # Read code for actions + id-token: write # Cosign signature verification steps: - - name: SSH and Redeploy - uses: appleboy/ssh-action@v1.0.0 + # Security: Verify image signature before deployment + - name: Install Cosign + uses: sigstore/cosign-installer@v4.0.0 + + - name: Verify Image Signature + run: | + echo "๐Ÿ” Verifying image signature..." 
+ cosign verify ghcr.io/giveth/impact-graph@${{ needs.publish.outputs.image-digest }} \ + --certificate-identity "${{ github.server_url }}/${{ github.repository }}/.github/workflows/staging-pipeline.yml@${{ github.ref }}" \ + --certificate-oidc-issuer="https://token.actions.githubusercontent.com" + echo "โœ… Image signature verified successfully! Image is authentic and untampered." + + # Only deploys if signature verification passes + - name: SSH and Pull Verified Image + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} @@ -183,15 +124,22 @@ jobs: port: ${{ secrets.SSH_PORT }} script: | cd giveth-all - # Pull the relevant images once; rollout jobs will reuse them - docker pull ghcr.io/giveth/impact-graph:staging + # Pull the verified image by its immutable SHA + docker pull ghcr.io/giveth/impact-graph@${{ needs.publish.outputs.image-digest }} + + # Export the verified SHA for docker-compose to use + echo "IMPACT_GRAPH_IMAGE=ghcr.io/giveth/impact-graph@${{ needs.publish.outputs.image-digest }}" > .env.impact-graph + + echo "โœ… Verified image pulled: ghcr.io/giveth/impact-graph@${{ needs.publish.outputs.image-digest }}" rollout-deploy-1: needs: deploy runs-on: ubuntu-latest + permissions: + contents: none # Minimal - only for actions to work steps: - name: SSH and Redeploy - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} @@ -199,18 +147,23 @@ jobs: port: ${{ secrets.SSH_PORT }} script: | cd giveth-all + # Load the verified image SHA from environment + export $(cat .env.impact-graph | xargs) + ## Update each backend service one by one - ## First Deployment (images were pulled earlier) + ## First Deployment - uses IMPACT_GRAPH_IMAGE with verified SHA docker compose up -d --no-deps --force-recreate impact-graph-ql1 docker compose up -d --no-deps --force-recreate impact-graph-jobs - echo 
"First deployment phase triggered. Deep health checks deferred to verify-health job." + echo "First deployment phase triggered using verified image: $IMPACT_GRAPH_IMAGE" rollout-deploy-2: needs: rollout-deploy-1 runs-on: ubuntu-latest + permissions: + contents: none # Minimal - only for actions to work steps: - name: SSH and Redeploy - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} @@ -218,17 +171,22 @@ jobs: port: ${{ secrets.SSH_PORT }} script: | cd giveth-all + # Load the verified image SHA from environment + export $(cat .env.impact-graph | xargs) + ## Update each backend service one by one - ## Second Deployment (images were pulled earlier) + ## Second Deployment - uses IMPACT_GRAPH_IMAGE with verified SHA docker compose up -d --no-deps --force-recreate impact-graph-ql2 - echo "Second deployment phase triggered. Health checks deferred to verify-health job." + echo "Second deployment phase triggered using verified image: $IMPACT_GRAPH_IMAGE" rollout-deploy-3: needs: rollout-deploy-2 runs-on: ubuntu-latest + permissions: + contents: none # Minimal - only for actions to work steps: - name: SSH and Redeploy - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} @@ -236,28 +194,37 @@ jobs: port: ${{ secrets.SSH_PORT }} script: | cd giveth-all + # Load the verified image SHA from environment + export $(cat .env.impact-graph | xargs) + ## Update each backend service one by one - ## Third Deployment (images were pulled earlier) + ## Third Deployment - uses IMPACT_GRAPH_IMAGE with verified SHA docker compose up -d --no-deps --force-recreate impact-graph-ql3 - echo "Third deployment phase triggered. Health checks deferred to verify-health job." 
+ echo "Third deployment phase triggered using verified image: $IMPACT_GRAPH_IMAGE" rollout-deploy-4: needs: rollout-deploy-3 runs-on: ubuntu-latest + permissions: + contents: none # Minimal - only for actions to work steps: - name: SSH and Redeploy - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} key: ${{ secrets.STAGING_PRIVATE_KEY_ALL }} port: ${{ secrets.SSH_PORT }} script: | + set -e cd giveth-all + # Load the verified image SHA from environment + export $(cat .env.impact-graph | xargs) + ## Update each backend service one by one - ## Fourth Deployment (images were pulled earlier) + ## Fourth Deployment - uses IMPACT_GRAPH_IMAGE with verified SHA docker compose up -d --no-deps --force-recreate impact-graph-ql4 - echo "Fourth deployment phase triggered. Health checks deferred to verify-health job." + echo "Fourth deployment phase triggered using verified image: $IMPACT_GRAPH_IMAGE" verify-health: needs: @@ -266,11 +233,14 @@ jobs: - rollout-deploy-3 - rollout-deploy-4 runs-on: ubuntu-latest + permissions: + contents: none # Minimal - only for actions to work steps: - name: Wait for services to stabilize - run: sleep 180 + run: sleep 60 + - name: SSH and Verify Health - uses: appleboy/ssh-action@v1.0.0 + uses: appleboy/ssh-action@v1.2.2 with: host: ${{ secrets.STAGING_HOST_ALL }} username: ${{ secrets.STAGING_USERNAME_ALL }} @@ -280,13 +250,15 @@ jobs: set -e cd giveth-all services=("impact-graph-ql1" "impact-graph-ql2" "impact-graph-ql3" "impact-graph-ql4") + + echo "=== Final Health Check for All Services ===" for svc in "${services[@]}"; do echo "Checking health for $svc ..." 
ok=0 for i in {1..12}; do # up to ~2 minutes status=$(docker inspect --format='{{json .State.Health.Status}}' "$svc" 2>/dev/null || echo '""') if [ "$status" = "\"healthy\"" ]; then - echo "$svc is healthy" + echo "โœ“ $svc is healthy" ok=1 break fi @@ -295,6 +267,8 @@ jobs: done if [ $ok -ne 1 ]; then echo "::error::$svc did not reach healthy status" + # Get container logs for debugging + docker logs --tail 100 "$svc" || true exit 1 fi done diff --git a/.gitignore b/.gitignore index 6799a37fa..6648c732f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ # node modules node_modules +*.env # builded sources build diff --git a/README.md b/README.md index 281d2bbbe..1947ebdda 100644 --- a/README.md +++ b/README.md @@ -5,12 +5,14 @@ ## 1. Project Overview -Impact Graph is a GraphQL server designed to enable rapid development of serverless impact project applications by managing the persistence and access of impact project data. +Impact Graph is a GraphQL server designed to enable rapid development of serverless impact project applications by managing the persistence of and access to impact project data. ### Purpose + Impact Graph serves as the backend for Giveth's donation platform, providing a robust API for managing projects, donations, user accounts, and various features related to the Giveth ecosystem. 
### Key Features + - **Project Management**: Create, Update, and Manage charitable projects - **User Authentication**: Multiple authentication strategies including JWT and OAuth - **Donation Processing**: Handle and verify donations on multiple blockchain networks @@ -21,6 +23,7 @@ Impact Graph serves as the backend for Giveth's donation platform, providing a r - **Multi-Blockchain Support**: Integration with Ethereum, Gnosis Chain, Polygon, Celo, Optimism, and more ### Live Links + - Production API: https://serve.giveth.io/graphql - Staging API: https://staging.serve.giveth.io/graphql - Frontend application: https://giveth.io @@ -28,6 +31,7 @@ Impact Graph serves as the backend for Giveth's donation platform, providing a r ## 2. Architecture Overview ### System Diagram + Impact Graph acts as a central API gateway that connects the frontend application with various backend services and data sources: ``` @@ -42,6 +46,7 @@ Database Blockchain External Pinata Redis ``` ### Tech Stack + - **Server**: Node.js with TypeScript - **API**: GraphQL (Apollo Server) - **Database**: PostgreSQL with TypeORM @@ -55,6 +60,7 @@ Database Blockchain External Pinata Redis - **File Storage**: Pinata IPFS ### Data Flow + 1. Frontend applications consume the GraphQL API endpoints 2. GraphQL resolvers process requests, applying business logic 3. 
Data is persisted in PostgreSQL via TypeORM @@ -66,11 +72,13 @@ Database Blockchain External Pinata Redis ### Process Diagrams #### Donation Flow + This diagram illustrates how donations are processed in the system: [![Donation Flow](https://mermaid.ink/img/eyJjb2RlIjoic2VxdWVuY2VEaWFncmFtXG4gICAgYXV0b251bWJlclxuICAgIGFjdG9yIFVzZXJcbiAgICBwYXJ0aWNpcGFudCBGcm9udGVuZFxuICAgIHBhcnRpY2lwYW50IE1ldGFtYXNrXG4gICAgcGFydGljaXBhbnQgQmFja2VuZFxuICAgIHBhcnRpY2lwYW50IERCXG4gICAgcGFydGljaXBhbnQgTW9ub3N3YXBcblxuICAgIFVzZXIgLT4-IEZyb250ZW5kOiBXYW50IHRvIGRvbmF0ZVxuICAgIG5vdGUgb3ZlciBGcm9udGVuZDogQ2hlY2sgcHJvamVjdCB3YWxsZXQgYWRkcmVzc1xuXG4gICAgRnJvbnRlbmQgLT4-IE1ldGFtYXNrIDogQ3JlYXRlIHRyYW5zYWN0aW9uXG4gICAgTWV0YW1hc2sgLS0-PiBVc2VyOiBTaG93IGNvbmZpcm0gdHJhbnNhY3Rpb24gcG9wdXBcbiAgICBVc2VyIC0-PiBNZXRhbWFzazogT2tcbiAgICBNZXRhbWFzayAtPj4gRnJvbnRlbmQgOiBGcm9udGVuZCBnZXQgdHhIYXNoIGJ5IHdlYjNcbiAgICBGcm9udGVuZCAtPj4gQmFja2VuZDogU2F2ZSBuZXcgZG9uYXRpb25cbiAgICBub3RlIG92ZXIgQmFja2VuZDogVmFsaWRhdGUgZG9uYXRpb24ncyBpbmZvIHdpdGggcHJvamVjdCB3YWxsZXQgYWRkcmVzcywgdXNlciwgLi5cbiAgICBCYWNrZW5kIC0-PiBEQjogU2F2ZSBkb25hdGlvbiB0byBEQlxuICAgIERCIC0tPj4gQmFja2VuZCA6IE9rXG4gICAgQmFja2VuZCAtPj4gTW9ub3N3YXAgOiBHZXQgcHJpY2Ugb2YgZG9uYXRlZCB0b2tlblxuICAgIGFsdCBNb25vc3dhcCBjYW4gZmV0Y2ggcHJpY2U6XG4gICAgICAgIE1vbm9zd2FwIC0tPj4gQmFja2VuZCA6IHJldHVybiBwcmljZVxuICAgICAgICBCYWNrZW5kIC0-PiBEQjogVXBkYXRlIHByaWNlVXNkIGFuZCB2YWx1ZVVzZCBvZiBkb25hdGlvblxuICAgICAgICBEQiAtLT4-IEJhY2tlbmQ6IE9rXG4gICAgICAgIEJhY2tlbmQgLT4-IERCOiBVcGRhdGUgcHJvamVjdCB0b3RhbERvbmF0aW9ucyB2YWx1ZVVzZFxuICAgICAgICBEQiAtLT4-IEJhY2tlbmQ6IE9rXG4gICAgZWxzZSBSZXR1cm4gZXJyb3I6XG4gICAgICAgIE1vbm9zd2FwIC0tPj4gQmFja2VuZCA6IFJldHVybiBFcnJvclxuICAgICAgICBub3RlIG92ZXIgQmFja2VuZDogRG8gbm90aGluZ1xuICAgIGVuZFxuICAgIEJhY2tlbmQgLS0-PiBGcm9udGVuZDogT2tcbiIsIm1lcm1haWQiOnsidGhlbWUiOiJkZWZhdWx0In0sInVwZGF0ZUVkaXRvciI6dHJ1ZSwiYXV0b1N5bmMiOnRydWUsInVwZGF0ZURpYWdyYW0iOmZhbHNlfQ)](https://mermaid-js.github.io/mermaid-live-editor/edit/#eyJjb2RlIjoic2VxdWVuY2VEaWFncmFtXG4gICAgYXV0b2
51bWJlclxuICAgIGFjdG9yIFVzZXJcbiAgICBwYXJ0aWNpcGFudCBGcm9udGVuZFxuICAgIHBhcnRpY2lwYW50IE1ldGFtYXNrXG4gICAgcGFydGljaXBhbnQgQmFja2VuZFxuICAgIHBhcnRpY2lwYW50IERCXG4gICAgcGFydGljaXBhbnQgTW9ub3N3YXBcblxuICAgIFVzZXIgLT4-IEZyb250ZW5kOiBXYW50IHRvIGRvbmF0ZVxuICAgIG5vdGUgb3ZlciBGcm9udGVuZDogQ2hlY2sgcHJvamVjdCB3YWxsZXQgYWRkcmVzc1xuXG4gICAgRnJvbnRlbmQgLT4-IE1ldGFtYXNrIDogQ3JlYXRlIHRyYW5zYWN0aW9uXG4gICAgTWV0YW1hc2sgLS0-PiBVc2VyOiBTaG93IGNvbmZpcm0gdHJhbnNhY3Rpb24gcG9wdXBcbiAgICBVc2VyIC0-PiBNZXRhbWFzazogT2tcbiAgICBNZXRhbWFzayAtPj4gRnJvbnRlbmQgOiBGcm9udGVuZCBnZXQgdHhIYXNoIGJ5IHdlYjNcbiAgICBGcm9udGVuZCAtPj4gQmFja2VuZDogU2F2ZSBuZXcgZG9uYXRpb25cbiAgICBub3RlIG92ZXIgQmFja2VuZDogVmFsaWRhdGUgZG9uYXRpb24ncyBpbmZvIHdpdGggcHJvamVjdCB3YWxsZXQgYWRkcmVzcywgdXNlciwgLi5cbiAgICBCYWNrZW5kIC0-PiBEQjogU2F2ZSBkb25hdGlvbiB0byBEQlxuICAgIERCIC0tPj4gQmFja2VuZCA6IE9rXG4gICAgQmFja2VuZCAtPj4gTW9ub3N3YXAgOiBHZXQgcHJpY2Ugb2YgZG9uYXRlZCB0b2tlblxuICAgIGFsdCBNb25vc3dhcCBjYW4gZmV0Y2ggcHJpY2U6XG4gICAgICAgIE1vbm9zd2FwIC0tPj4gQmFja2VuZCA6IHJldHVybiBwcmljZVxuICAgICAgICBCYWNrZW5kIC0-PiBEQjogVXBkYXRlIHByaWNlVXNkIGFuZCB2YWx1ZVVzZCBvZiBkb25hdGlvblxuICAgICAgICBEQiAtLT4-IEJhY2tlbmQ6IE9rXG4gICAgICAgIEJhY2tlbmQgLT4-IERCOiBVcGRhdGUgcHJvamVjdCB0b3RhbERvbmF0aW9ucyB2YWx1ZVVzZFxuICAgICAgICBEQiAtLT4-IEJhY2tlbmQ6IE9rXG4gICAgZWxzZSBSZXR1cm4gZXJyb3I6XG4gICAgICAgIE1vbm9zd2FwIC0tPj4gQmFja2VuZCA6IFJldHVybiBFcnJvclxuICAgICAgICBub3RlIG92ZXIgQmFja2VuZDogRG8gbm90aGluZ1xuICAgIGVuZFxuICAgIEJhY2tlbmQgLS0-PiBGcm9udGVuZDogT2tcbiIsIm1lcm1haWQiOiJ7XG4gIFwidGhlbWVcIjogXCJkZWZhdWx0XCJcbn0iLCJ1cGRhdGVFZGl0b3IiOnRydWUsImF1dG9TeW5jIjp0cnVlLCJ1cGRhdGVEaWFncmFtIjpmYWxzZX0) #### Social Network Verification Flow + This diagram shows how project verification works through social network accounts: [![Social Network 
Verification](https://mermaid.ink/img/pako:eNrFVcFu2zAM_RXCl7VAmgA9-hCga9euQJcNS7tTgEGVaEeNLXkSnSwo-u-jJDtJ03TYgA7zyYbI98j3aOoxk1Zhlmcef7RoJF5oUTpRzwzwIyRZB3ceXfpuhCMtdSMMwaWzhtColyfvhVwcPJhOQHiYWqlFBROklXULCGEzk4J7TDgZj3uYHM5FVYFQaoKrlPvF2UJXCHVLgrQ1cOQ5UJsSfDzvoQUjNc4-oKRv6HShZQy_tK6-VseJ0lhCsEt0W74pEmjTtARKkOBXcKi0j3AlGnSCUwSQ4wQmPWcBX8X6JEIItK6C-zXISqOhazWIiI7ruuODAQyHw4TQ5cFJEKBXI4evDql1JsAcEGo64YgOD1o2C8h2SoDppVDKofcpm32IBMHYHM78YpPlUCiQ1rCeFPutbKk7c0I0bAhvwsFLom5uKoIL7aV13RT0rLu2vih55Hfd9SPJvt9z9EglKFhpmvNMSm7kO1lGCd7w2Lo1eHKs85ZsI-R4HKq9Yktj_doUNjiRUG4DyPMK90pMsveZKXQz26HNK2vLCt-iyzIiDVKX4a_8o-6iGdPwEVOYoORe_6q9neDouVZJ3W3mgcG-wFRiFwtHoOmdh4cVHXc_Smr5OgheOFsDzdHhjoJbDW-0WbBM5i1UrDqs_6bjvnb7HHGbzYkan49GotHDvuKhtPVoeTqq8TdUYdG9Oo8HbLqKu6y3IO2xKMzu9oqG0dzZFeBPiU3cqbpIjjIoLEWlX-c4560RN-IyLllU8MyciO6wCjGagrIHdjIUvJS3E_H57O7242naPuDbKOxhSbdLsuh3YtmuPfi5bSt-10tkvbi4IHNg4UZZg6Z1jfW4J-AefSG4fPWPebNBVqOrhVZ8Cz_G2zDjX4WnIMv5VWEh2opm2cw8cWjb8J2EH5TmiznLC1F5HGSiJTtdG5nl5Frsg7qbvIt6-gW6nqHe)](https://mermaid-js.github.io/mermaid-live-editor/edit#pako:eNrFVcFu2zAM_RXCl7VAmgA9-hCga9euQJcNS7tTgEGVaEeNLXkSnSwo-u-jJDtJ03TYgA7zyYbI98j3aOoxk1Zhlmcef7RoJF5oUTpRzwzwIyRZB3ceXfpuhCMtdSMMwaWzhtColyfvhVwcPJhOQHiYWqlFBROklXULCGEzk4J7TDgZj3uYHM5FVYFQaoKrlPvF2UJXCHVLgrQ1cOQ5UJsSfDzvoQUjNc4-oKRv6HShZQy_tK6-VseJ0lhCsEt0W74pEmjTtARKkOBXcKi0j3AlGnSCUwSQ4wQmPWcBX8X6JEIItK6C-zXISqOhazWIiI7ruuODAQyHw4TQ5cFJEKBXI4evDql1JsAcEGo64YgOD1o2C8h2SoDppVDKofcpm32IBMHYHM78YpPlUCiQ1rCeFPutbKk7c0I0bAhvwsFLom5uKoIL7aV13RT0rLu2vih55Hfd9SPJvt9z9EglKFhpmvNMSm7kO1lGCd7w2Lo1eHKs85ZsI-R4HKq9Yktj_doUNjiRUG4DyPMK90pMsveZKXQz26HNK2vLCt-iyzIiDVKX4a_8o-6iGdPwEVOYoORe_6q9neDouVZJ3W3mgcG-wFRiFwtHoOmdh4cVHXc_Smr5OgheOFsDzdHhjoJbDW-0WbBM5i1UrDqs_6bjvnb7HHGbzYkan49GotHDvuKhtPVoeTqq8TdUYdG9Oo8HbLqKu6y3IO2xKMzu9oqG0dzZFeBPiU3cqbpIjjIoLEWlX-c4560RN-IyLllU8MyciO6wCjGagrIHdjIUvJS3E_H57O7242naPuDbKOxhSbdLsuh3YtmuPfi5bSt-10tkvbi4IHNg4UZZg6Z1jfW4J-AefSG4fPWPebNBVqOrhVZ8Cz_G2zDjX4WnIMv5VWEh2opm2cw8cWjb8J2EH5TmiznLC1F5HGSiJTtdG5nl5Fr
sg7qbvIt6-gW6nqHe) @@ -78,30 +86,36 @@ This diagram shows how project verification works through social network account ## 3. Getting Started ### Prerequisites + - Node.js (v20.11.0 or later as specified in .nvmrc) - PostgreSQL database - Redis instance - Various API keys for external services (detailed in environment variables) ### Installation Steps + 1. Clone the repository: + ``` git clone git@github.com:Giveth/impact-graph.git cd impact-graph ``` 2. Install dependencies: + ``` nvm use # Uses version specified in .nvmrc npm i ``` 3. Configure environment: + ``` cp config/example.env config/development.env ``` 4. Set up database: + - Either create a PostgreSQL database manually, or - Use Docker Compose to spin up a local database: ``` @@ -109,6 +123,7 @@ This diagram shows how project verification works through social network account ``` 5. Run database migrations: + ``` npm run db:migrate:run:local ``` @@ -119,9 +134,11 @@ This diagram shows how project verification works through social network account ``` ### Configuration + The application is configured via environment variables. Key configuration areas include: 1. **Database Connection**: + ``` TYPEORM_DATABASE_TYPE=postgres TYPEORM_DATABASE_NAME=givethio @@ -132,6 +149,7 @@ The application is configured via environment variables. Key configuration areas ``` 2. **Authentication**: + ``` JWT_SECRET=your_jwt_secret JWT_MAX_AGE=time_in_seconds @@ -139,6 +157,7 @@ The application is configured via environment variables. Key configuration areas ``` 3. 
**Blockchain Providers**: + ``` XDAI_NODE_HTTP_URL=your_xdai_node_url INFURA_API_KEY=your_infura_key @@ -159,17 +178,20 @@ For a complete list of configuration options, refer to the `config/example.env` ### Running the Application **Development mode**: + ``` npm start ``` **Production mode**: + ``` npm run build npm run production ``` **Docker**: + ``` # Development docker-compose -f docker-compose-local.yml up -d @@ -179,12 +201,15 @@ docker-compose -f docker-compose-production.yml up -d ``` ### Testing + Run all tests: + ``` npm test ``` Run specific test suites: + ``` npm run test:userRepository npm run test:projectResolver @@ -193,6 +218,7 @@ npm run test:donationRepository ``` For running tests with necessary environment variables: + ``` PINATA_API_KEY=0000000000000 PINATA_SECRET_API_KEY=00000000000000000000000000000000000000000000000000000000 ETHERSCAN_API_KEY=0000000000000000000000000000000000 XDAI_NODE_HTTP_URL=https://xxxxxx.xdai.quiknode.pro INFURA_API_KEY=0000000000000000000000000000000000 npm run test ``` @@ -200,29 +226,34 @@ PINATA_API_KEY=0000000000000 PINATA_SECRET_API_KEY=00000000000000000000000000000 ### Common Tasks **Creating migrations**: + ``` npx typeorm-ts-node-commonjs migration:create ./migration/create_new_table ``` **Running migrations**: + ``` npm run db:migrate:run:local # For development npm run db:migrate:run:production # For production ``` **Reverting migrations**: + ``` npm run db:migrate:revert:local ``` **Admin Dashboard**: Access the admin dashboard at `/admin` with these default credentials (in development): + - Admin user: test-admin@giveth.io / admin - Campaign manager: campaignManager@giveth.io / admin - Reviewer: reviewer@giveth.io / admin - Operator: operator@giveth.io / admin Creating an admin user manually: + ```sql -- First generate the hash with bcrypt const bcrypt = require('bcrypt'); @@ -238,16 +269,16 @@ INSERT INTO public.user (email, "walletAddress", role, "loginType", name, "encry ### Project Statuses -| ID | 
Symbol | Name | Description | Who can change to | -|----|---------------|---------------|-------------------------------------------------------------|-------------------------| -| 1 | rejected | rejected | Project rejected by Giveth or platform owner | | -| 2 | pending | pending | Project created, pending approval | | -| 3 | clarification | clarification | Clarification requested by Giveth or platform owner | | -| 4 | verification | verification | Verification in progress (including KYC) | | -| 5 | activated | activated | Active project | project owner and admin | -| 6 | deactivated | deactivated | Deactivated by user or Giveth Admin | project owner and admin | -| 7 | cancelled | cancelled | Cancelled by Giveth Admin | admin | -| 8 | drafted | drafted | Project draft for a potential new project, can be discarded | project owner | +| ID | Symbol | Name | Description | Who can change to | +| --- | ------------- | ------------- | ----------------------------------------------------------- | ----------------------- | +| 1 | rejected | rejected | Project rejected by Giveth or platform owner | | +| 2 | pending | pending | Project created, pending approval | | +| 3 | clarification | clarification | Clarification requested by Giveth or platform owner | | +| 4 | verification | verification | Verification in progress (including KYC) | | +| 5 | activated | activated | Active project | project owner and admin | +| 6 | deactivated | deactivated | Deactivated by user or Giveth Admin | project owner and admin | +| 7 | cancelled | cancelled | Cancelled by Giveth Admin | admin | +| 8 | drafted | drafted | Project draft for a potential new project, can be discarded | project owner | - If a project is **cancelled**, only admin can activate that - If project is **deactive**, both admins and project owner can activate it @@ -256,17 +287,21 @@ INSERT INTO public.user (email, "walletAddress", role, "loginType", name, "encry ## 5. 
Deployment Process ### Environments + - **Staging**: Used for pre-release testing - **Production**: Live environment for end users ### Deployment Steps + 1. Changes are pushed to the appropriate branch (staging or master) 2. GitHub Actions automatically runs tests and builds the application 3. If all tests pass, the application is deployed to the corresponding environment 4. Database migrations are automatically applied ### CI/CD Integration + The project uses GitHub Actions for continuous integration and deployment: + - `.github/workflows/staging-pipeline.yml`: Deploys to staging environment - `.github/workflows/master-pipeline.yml`: Deploys to production environment @@ -275,21 +310,25 @@ The project uses GitHub Actions for continuous integration and deployment: ### Common Issues **Database Connection Issues**: + - Verify database credentials in environment variables - Check that the database server is running - Ensure network connectivity between application and database **Authentication Failures**: + - Check JWT_SECRET configuration - Verify OAuth provider settings **Blockchain Integration Issues**: + - Ensure node providers (Infura, etc.) are configured correctly - Check API keys for blockchain explorers ### Logs and Debugging **Viewing Logs**: + ``` # Install bunyan for formatted logs npm i -g bunyan @@ -297,6 +336,7 @@ tail -f logs/impact-graph.log | bunyan ``` **Development Debugging**: + 1. Use GraphQL playground at `/graphql` to test queries 2. Check server logs for detailed error information 3. 
Use the AdminJS interface to inspect data diff --git a/docker-compose-ram.yml b/docker-compose-ram.yml new file mode 100644 index 000000000..7d8e57da0 --- /dev/null +++ b/docker-compose-ram.yml @@ -0,0 +1,339 @@ +version: "3.9" + +services: + ## Impact-Graph + # Multiple GraphQL API instances (for load balancing) + impact-graph-ql1: + container_name: impact-graph-ql1 + image: ${IMPACT_GRAPH_IMAGE:-ghcr.io/giveth/impact-graph:staging} + command: npm run start:docker:server + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - NODE_ENV=production + - ENVIRONMENT=staging + - REDIS_HOST=redis-all + - REDIS_PORT=6379 + - LOG_PATH=/usr/src/app/logs/impact-graph-graphql-1.log + - ENABLE_GRAPHQL=true + - ENABLE_CRONJOBS=false + - HOST=0.0.0.0 + - PORT=4000 + restart: always + volumes: + - ./config/impact-graph:/usr/src/app/config:ro + - ./config/impact-graph:/usr/src/app/build/config:ro + - ./logs/impact-graph:/usr/src/app/logs:rw + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:4000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + depends_on: + - redis-all + - auth + - notification-center + networks: + - giveth + + impact-graph-ql2: + container_name: impact-graph-ql2 + image: ${IMPACT_GRAPH_IMAGE:-ghcr.io/giveth/impact-graph:staging} + command: npm run production + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - ENVIRONMENT=staging + - LOG_PATH=/usr/src/app/logs/impact-graph-graphql-2.log + - ENABLE_GRAPHQL=true + - ENABLE_CRONJOBS=false + - HOST=0.0.0.0 + - PORT=4000 + restart: always + volumes: + - ./config/impact-graph:/usr/src/app/config:ro + - ./config/impact-graph/production.env:/usr/src/app/build/config/production.env:ro + - ./logs/impact-graph:/usr/src/app/logs:rw + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:4000/health || exit 1"] + 
interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + depends_on: + - redis-all + - auth + - notification-center + networks: + - giveth + + impact-graph-ql3: + container_name: impact-graph-ql3 + image: ${IMPACT_GRAPH_IMAGE:-ghcr.io/giveth/impact-graph:staging} + command: npm run production + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - ENVIRONMENT=staging + - LOG_PATH=/usr/src/app/logs/impact-graph-graphql-3.log + - ENABLE_GRAPHQL=true + - ENABLE_CRONJOBS=false + - HOST=0.0.0.0 + - PORT=4000 + restart: always + volumes: + - ./config/impact-graph:/usr/src/app/config:ro + - ./config/impact-graph/production.env:/usr/src/app/build/config/production.env:ro + - ./logs/impact-graph:/usr/src/app/logs:rw + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:4000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + depends_on: + - redis-all + - auth + - notification-center + networks: + - giveth + + impact-graph-ql4: + container_name: impact-graph-ql4 + image: ${IMPACT_GRAPH_IMAGE:-ghcr.io/giveth/impact-graph:staging} + command: npm run production + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - ENVIRONMENT=staging + - LOG_PATH=/usr/src/app/logs/impact-graph-graphql-4.log + - ENABLE_GRAPHQL=true + - ENABLE_CRONJOBS=false + - HOST=0.0.0.0 + - PORT=4000 + restart: always + volumes: + - ./config/impact-graph:/usr/src/app/config:ro + - ./config/impact-graph/production.env:/usr/src/app/build/config/production.env:ro + - ./logs/impact-graph:/usr/src/app/logs:rw + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:4000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + depends_on: + - redis-all + - auth + - notification-center + networks: + - giveth + + # Background jobs instance (dedicated for cronjobs only) + 
impact-graph-jobs: + container_name: impact-graph-jobs + image: ${IMPACT_GRAPH_IMAGE:-ghcr.io/giveth/impact-graph:staging} + command: npm run production + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - NODE_ENV=production + - LOG_PATH=/usr/src/app/logs/impact-graph-jobs.log + - ENABLE_GRAPHQL=false + - ENABLE_CRONJOBS=true + - HOST=0.0.0.0 + - PORT=4000 + restart: always + volumes: + - ./config/impact-graph:/usr/src/app/config:ro + - ./config/impact-graph/production.env:/usr/src/app/build/config/production.env:ro + - ./logs/impact-graph:/usr/src/app/logs:rw + healthcheck: + test: ["CMD-SHELL", "curl -f http://localhost:4000/health || exit 1"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 60s + depends_on: + - redis-all + networks: + - giveth + + #Authentication Service + auth: + image: ghcr.io/giveth/siweauthmicroservice:staging + restart: unless-stopped + command: npm run start:server:staging + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - NODE_ENV=staging + networks: + - giveth + volumes: + - ./config/auth:/usr/src/app/dist/config:ro + - ./config/auth:/usr/src/app/config:ro + - ./logs/auth:/usr/src/app/dist/logs:rw + depends_on: + - redis-all + + #Notification-Center + notification-center: + image: ghcr.io/giveth/notification-center:staging + command: npm run start:server:staging + restart: unless-stopped + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - NODE_ENV=staging + networks: + - giveth + volumes: + - ./config/notification-center:/usr/src/app/dist/config:ro + - ./config/notification-center:/usr/src/app/config:ro + - ./logs/notification-center:/usr/src/app/dist/logs:rw + depends_on: + - redis-all + + # GivEconomy-Notification-Service + givEconomy-notification-service: + image: 
ghcr.io/giveth/giveconomy-notification-service:staging + command: npm run start:docker:server + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + read_only: true + tmpfs: + - /tmp + environment: + - NODE_ENV=staging + restart: unless-stopped + volumes: + - ./config/giveconomy-notification-service:/usr/src/app/config:ro + - ./logs/giveconomy-notification-service:/usr/src/app/logs:rw + - ./data/giveconomy-notification-service:/usr/src/app/data:rw + networks: + - giveth + + # REDIS + redis-all: + image: ghcr.io/giveth/giveth-redis:latest + container_name: redis-all + command: redis-server --requirepass ${REDIS_PASSWORD:-changeme123} + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + networks: + - giveth + environment: + - REDIS_PASSWORD=${REDIS_PASSWORD:-changeme123} + restart: always + volumes: + - redis-data:/data + healthcheck: + test: ["CMD", "redis-cli", "--raw", "incr", "ping"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 10s + + # Caddy + caddy: + image: ghcr.io/giveth/giveth-caddy:latest + container_name: caddy-reverse-proxy + restart: unless-stopped + init: true + security_opt: + - no-new-privileges:true + cap_drop: + - ALL + cap_add: + - NET_BIND_SERVICE # Required for binding to ports 80 and 443 + networks: + - giveth + ports: + - 80:80 + - 443:443 + env_file: + - .env + environment: + IMPACT_GRAPH_URL: ${IMPACT_GRAPH_URL:-} + AUTH_SERVICE_URL: ${AUTH_SERVICE_URL:-} + NOTIFICATION_CENTER_URL: ${NOTIFICATION_CENTER_URL:-} + # APIGIV_URL = ${APIGIV_URL:-} + RESTRICTED_PATHS: ${RESTRICTED_PATHS:-} + IP_WHITELIST: ${IP_WHITELIST:-} + IG_WHITELIST_RATE_EVENTS: ${IG_WHITELIST_RATE_EVENTS:-} + IG_WHITELIST_RATE_INTERVAL: ${IG_WHITELIST_RATE_INTERVAL:-} + IG_PUBLIC_RATE_EVENTS: ${IG_PUBLIC_RATE_EVENTS:-} + IG_PUBLIC_RATE_INTERVAL: ${IG_PUBLIC_RATE_INTERVAL:-} + DOMAIN_WHITELIST: ${DOMAIN_WHITELIST:-} + volumes: + - caddy_data:/data + - caddy_config:/config + - 
./packages/frontend/caddy/Caddyfile:/etc/caddy/Caddyfile:ro + - ./logs/caddy:/usr/src/app/:rw + depends_on: + - impact-graph-ql1 + - impact-graph-ql2 + - impact-graph-ql3 + - impact-graph-ql4 + - impact-graph-jobs + - auth + - notification-center + +volumes: + redis-data: + caddy_data: + caddy_config: + +networks: + giveth: + driver: bridge \ No newline at end of file diff --git a/migration/1731010000000-AddTwitterAndTelegramToUser.ts b/migration/1731010000000-AddTwitterAndTelegramToUser.ts new file mode 100644 index 000000000..2503ab8d0 --- /dev/null +++ b/migration/1731010000000-AddTwitterAndTelegramToUser.ts @@ -0,0 +1,35 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddTwitterAndTelegramToUser1731010000000 + implements MigrationInterface +{ + name = 'AddTwitterAndTelegramToUser1731010000000'; + + public async up(queryRunner: QueryRunner): Promise { + // Add twitterName column to user table + await queryRunner.query(` + ALTER TABLE "user" + ADD COLUMN "twitterName" character varying + `); + + // Add telegramName column to user table + await queryRunner.query(` + ALTER TABLE "user" + ADD COLUMN "telegramName" character varying + `); + } + + public async down(queryRunner: QueryRunner): Promise { + // Drop telegramName column + await queryRunner.query(` + ALTER TABLE "user" + DROP COLUMN IF EXISTS "telegramName" + `); + + // Drop twitterName column + await queryRunner.query(` + ALTER TABLE "user" + DROP COLUMN IF EXISTS "twitterName" + `); + } +} diff --git a/migration/1779182511003-AddUniqueConstraintToProjectQfRounds.ts b/migration/1779182511003-AddUniqueConstraintToProjectQfRounds.ts new file mode 100644 index 000000000..92a41b9ad --- /dev/null +++ b/migration/1779182511003-AddUniqueConstraintToProjectQfRounds.ts @@ -0,0 +1,23 @@ +import { MigrationInterface, QueryRunner } from 'typeorm'; + +export class AddUniqueConstraintToProjectQfRounds1779182511003 + implements MigrationInterface +{ + name = 
'AddUniqueConstraintToProjectQfRounds1779182511003'; + + public async up(queryRunner: QueryRunner): Promise { + // Add UNIQUE constraint to match entity definition + await queryRunner.query(` + ALTER TABLE "project_qf_rounds_qf_round" + ADD CONSTRAINT "UQ_project_qf_rounds_project_qf_round" UNIQUE ("projectId", "qfRoundId") + `); + } + + public async down(queryRunner: QueryRunner): Promise { + // Drop UNIQUE constraint + await queryRunner.query(` + ALTER TABLE "project_qf_rounds_qf_round" + DROP CONSTRAINT IF EXISTS "UQ_project_qf_rounds_project_qf_round" + `); + } +} diff --git a/package-lock.json b/package-lock.json index 8c0a4a314..29c75539f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -71,7 +71,7 @@ "patch-package": "^6.5.1", "rate-limit-redis": "^4.2.0", "reflect-metadata": "^0.1.13", - "siwe": "^1.1.6", + "siwe": "^3.0.0", "slugify": "^1.4.7", "stripe": "^8.137.0", "threads": "^1.7.0", @@ -4385,8 +4385,7 @@ "type": "individual", "url": "https://paulmillr.com/funding/" } - ], - "peer": true + ] }, "node_modules/@noble/secp256k1": { "version": "1.7.1", @@ -5826,11 +5825,13 @@ } }, "node_modules/@spruceid/siwe-parser": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@spruceid/siwe-parser/-/siwe-parser-1.1.3.tgz", - "integrity": "sha512-oQ8PcwDqjGWJvLmvAF2yzd6iniiWxK0Qtz+Dw+gLD/W5zOQJiKIUXwslHOm8VB8OOOKW9vfR3dnPBhHaZDvRsw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@spruceid/siwe-parser/-/siwe-parser-3.0.0.tgz", + "integrity": "sha512-Y92k63ilw/8jH9Ry4G2e7lQd0jZAvb0d/Q7ssSD0D9mp/Zt2aCXIc3g0ny9yhplpAx1QXHsMz/JJptHK/zDGdw==", + "license": "Apache-2.0", "dependencies": { - "apg-js": "^4.1.1" + "@noble/hashes": "^1.1.2", + "apg-js": "^4.4.0" } }, "node_modules/@sqltools/formatter": { @@ -7959,9 +7960,10 @@ } }, "node_modules/apg-js": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/apg-js/-/apg-js-4.1.2.tgz", - "integrity": 
"sha512-2OALKUe82NLVPe4NTooom8NykWIa2D7YxO7jG1pgnYWnkfhTUriXpITmLvVD8k8TzDfa9G5O4y8rPe2/uUB1Bg==" + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/apg-js/-/apg-js-4.4.0.tgz", + "integrity": "sha512-fefmXFknJmtgtNEXfPwZKYkMFX4Fyeyz+fNF6JWp87biGOPslJbCBVU158zvKRZfHBKnJDy8CMM40oLFGkXT8Q==", + "license": "BSD-2-Clause" }, "node_modules/app-root-path": { "version": "3.1.0", @@ -18934,16 +18936,16 @@ } }, "node_modules/siwe": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/siwe/-/siwe-1.1.6.tgz", - "integrity": "sha512-3WRdEil32Tc2vuNzqJ2/Z/MIvsvy0Nkzc2ov+QujmpHO7tM83dgcb47z0Pu236T4JQkOQCqQkq3AJ/rVIezniA==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/siwe/-/siwe-3.0.0.tgz", + "integrity": "sha512-P2/ry7dHYJA6JJ5+veS//Gn2XDwNb3JMvuD6xiXX8L/PJ1SNVD4a3a8xqEbmANx+7kNQcD8YAh1B9bNKKvRy/g==", + "license": "Apache-2.0", "dependencies": { - "@spruceid/siwe-parser": "^1.1.3", - "@stablelib/random": "^1.0.1", - "apg-js": "^4.1.1" + "@spruceid/siwe-parser": "^3.0.0", + "@stablelib/random": "^1.0.1" }, "peerDependencies": { - "ethers": "5.5.1" + "ethers": "^5.6.8 || ^6.0.8" } }, "node_modules/slash": { diff --git a/package.json b/package.json index ff0e43515..b75c703ae 100644 --- a/package.json +++ b/package.json @@ -65,7 +65,7 @@ "patch-package": "^6.5.1", "rate-limit-redis": "^4.2.0", "reflect-metadata": "^0.1.13", - "siwe": "^1.1.6", + "siwe": "^3.0.0", "slugify": "^1.4.7", "stripe": "^8.137.0", "threads": "^1.7.0", @@ -231,11 +231,12 @@ "db:migrate:revert:local": "NODE_ENV=development npx typeorm-ts-node-commonjs migration:revert -d ./src/ormconfig.ts", "db:migrate:create:local": "NODE_ENV=development npx typeorm-ts-node-commonjs migration:generate migration/$MIGRATION_NAME -d ./src/ormconfig.ts", "db:migrate:run:production": "NODE_ENV=production npx typeorm-ts-node-commonjs migration:run -d ./src/ormconfig.ts", - "db:migrate:rever:productiont": "NODE_ENV=production npx typeorm-ts-node-commonjs migration:revert -d 
./src/ormconfig.ts", + "db:migrate:revert:production": "NODE_ENV=production npx typeorm-ts-node-commonjs migration:revert -d ./src/ormconfig.ts", "prettify": "prettier --write '**/*.ts*' '**/*.test.ts*'", "db:migrate:seedToken:run": "NODE_ENV=development ts-node ./node_modules/typeorm/cli -f ./src/seedToken-ormconfig.ts migration:run", "db:migrate:seedToken:revert": "NODE_ENV=development ts-node ./node_modules/typeorm/cli -f ./src/seedToken-ormconfig.ts migration:revert", "build": "rm -rf ./build && tsc && mkdir ./build/config && mkdir ./build/src/server/adminJs/tabs/components && cp -r src/server/adminJs/tabs/components/* ./build/src/server/adminJs/tabs/components/ && mkdir ./build/src/utils/locales && cp -r ./src/utils/locales/* ./build/src/utils/locales/ && cp -r ./src/abi build/src/abi ", + "build:ci": "rm -rf ./build && tsc && mkdir ./build/config && mkdir ./build/src/server/adminJs/tabs/components && cp -r src/server/adminJs/tabs/components/* ./build/src/server/adminJs/tabs/components/ && mkdir ./build/src/utils/locales && cp -r ./src/utils/locales/* ./build/src/utils/locales/ && cp -r ./src/abi build/src/abi && cp ./config/test.env build/config/test.env && cp -r ./test/images build/test/images", "dev": "NODE_ENV=development node ./build/src/index.js", "production": "NODE_ENV=production node ./build/src/index.js", "start:docker:server": "npm run db:migrate:run:production && npm run production", diff --git a/src/adapters/notifications/NotificationCenterAdapter.ts b/src/adapters/notifications/NotificationCenterAdapter.ts index 4783ece2f..b55248b43 100644 --- a/src/adapters/notifications/NotificationCenterAdapter.ts +++ b/src/adapters/notifications/NotificationCenterAdapter.ts @@ -20,6 +20,7 @@ import { toFixNumber } from '../../services/donationService'; import { getTokenPrice } from '../../services/priceService'; import { logger } from '../../utils/logger'; import { buildTxLink } from '../../utils/networks'; +import { validateEmailWithRegex } from 
'../../utils/user'; import { createBasicAuthentication, isProduction } from '../../utils/utils'; import { BroadCastNotificationInputParams, @@ -165,6 +166,12 @@ export class NotificationCenterAdapter implements NotificationAdapterInterface { async createOrttoProfile(user: User): Promise { try { const { id, email, firstName, lastName } = user; + + // Only create Ortto profile if user has an email address + if (!email || !validateEmailWithRegex(email)) { + return; + } + await callSendNotification({ eventName: NOTIFICATIONS_EVENT_NAMES.CREATE_ORTTO_PROFILE, trackId: 'create-ortto-profile-' + user.id, @@ -179,7 +186,18 @@ export class NotificationCenterAdapter implements NotificationAdapterInterface { } async updateOrttoPeople(people: OrttoPerson[]): Promise { - // TODO we should me this to notification-center, it's not good that we call Ortto directly + // TODO we should move this to notification-center, it's not good that we call Ortto directly + + // Only include people with valid email addresses + const peopleWithEmail = people.filter(person => { + const email = person.fields['str::email']; + return email && validateEmailWithRegex(email); + }); + + if (peopleWithEmail.length === 0) { + return; + } + const merge_by: string[] = []; if (isProduction) { merge_by.push('str:cm:user-id'); @@ -188,11 +206,11 @@ export class NotificationCenterAdapter implements NotificationAdapterInterface { } try { const data = { - people, + people: peopleWithEmail, async: false, merge_by, }; - logger.debug('updateOrttoPeople has been called:', people); + logger.debug('updateOrttoPeople has been called:', peopleWithEmail); const orttoConfig = { method: 'post', maxBodyLength: Infinity, diff --git a/src/entities/project.ts b/src/entities/project.ts index 41e920b7c..b73193554 100644 --- a/src/entities/project.ts +++ b/src/entities/project.ts @@ -2,13 +2,16 @@ import { Field, Float, ID, ObjectType } from 'type-graphql'; import { AfterInsert, AfterUpdate, - BeforeUpdate, - BeforeInsert, 
BaseEntity, + BeforeInsert, BeforeRemove, + BeforeUpdate, + ChildEntity, Column, Entity, Index, + JoinColumn, + JoinTable, LessThan, ManyToMany, ManyToOne, @@ -16,44 +19,43 @@ import { OneToOne, PrimaryGeneratedColumn, RelationId, - JoinTable, - JoinColumn, - ChildEntity, TableInheritance, } from 'typeorm'; import { Int } from 'type-graphql/dist/scalars/aliases'; -import { Donation } from './donation'; -import { Reaction } from './reaction'; -import { User } from './user'; -import { ProjectStatus } from './projectStatus'; -import { ProjectStatusHistory } from './projectStatusHistory'; -import { ProjectStatusReason } from './projectStatusReason'; -import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; -import { Organization } from './organization'; -import { findUserById } from '../repositories/userRepository'; -import { SocialProfile } from './socialProfile'; -import { ProjectVerificationForm } from './projectVerificationForm'; -import { ProjectAddress } from './projectAddress'; -import { ProjectContacts } from './projectVerificationForm'; -import { ProjectPowerView } from '../views/projectPowerView'; -import { ProjectFuturePowerView } from '../views/projectFuturePowerView'; -import { ProjectInstantPowerView } from '../views/projectInstantPowerView'; -import { Category } from './category'; -import { FeaturedUpdate } from './featuredUpdate'; -import { getHtmlTextSummary } from '../utils/utils'; -import { QfRound } from './qfRound'; -import { ProjectQfRound } from './projectQfRound'; import { findActiveQfRounds, getProjectDonationsSqrtRootSumInAllQfRounds, getQfRoundTotalSqrtRootSumSquaredInAllQfRounds, } from '../repositories/qfRoundRepository'; +import { findUserById } from '../repositories/userRepository'; import { EstimatedMatchingByQfRound } from '../types/qfTypes'; +import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; +import { getHtmlTextSummary } from '../utils/utils'; +import { ProjectFuturePowerView } from 
'../views/projectFuturePowerView'; +import { ProjectInstantPowerView } from '../views/projectInstantPowerView'; +import { ProjectPowerView } from '../views/projectPowerView'; +import { AnchorContractAddress } from './anchorContractAddress'; import { Campaign } from './campaign'; +import { Category } from './category'; +import { Donation } from './donation'; +import { FeaturedUpdate } from './featuredUpdate'; +import { Organization } from './organization'; +import { ProjectAddress } from './projectAddress'; import { ProjectEstimatedMatchingView } from './ProjectEstimatedMatchingView'; -import { AnchorContractAddress } from './anchorContractAddress'; +import { ProjectQfRound } from './projectQfRound'; import { ProjectSocialMedia } from './projectSocialMedia'; +import { ProjectStatus } from './projectStatus'; +import { ProjectStatusHistory } from './projectStatusHistory'; +import { ProjectStatusReason } from './projectStatusReason'; +import { + ProjectContacts, + ProjectVerificationForm, +} from './projectVerificationForm'; +import { QfRound } from './qfRound'; +import { Reaction } from './reaction'; +import { SocialProfile } from './socialProfile'; +import { User } from './user'; // eslint-disable-next-line @typescript-eslint/no-var-requires const moment = require('moment'); @@ -235,7 +237,12 @@ export class Project extends BaseEntity { @ManyToMany(_type => QfRound, qfRound => qfRound.projects, { nullable: true, }) - @JoinTable() + @JoinTable({ + name: 'project_qf_rounds_qf_round', + joinColumn: { name: 'projectId', referencedColumnName: 'id' }, + inverseJoinColumn: { name: 'qfRoundId', referencedColumnName: 'id' }, + synchronize: false, + }) qfRounds: QfRound[]; @Field(_type => [ProjectQfRound], { nullable: true }) @@ -250,6 +257,7 @@ export class Project extends BaseEntity { @Column({ nullable: true }) stripeAccountId?: string; + // It's not used anymore, we should remove it, we use projectAddress instead @Field({ nullable: true }) @Column({ unique: true, nullable: 
true }) walletAddress?: string; diff --git a/src/entities/projectQfRound.ts b/src/entities/projectQfRound.ts index 55d2a4593..166c4467d 100644 --- a/src/entities/projectQfRound.ts +++ b/src/entities/projectQfRound.ts @@ -1,6 +1,6 @@ import { Field, ID, ObjectType, Float, Int } from 'type-graphql'; import { - PrimaryColumn, + PrimaryGeneratedColumn, Column, Entity, ManyToOne, @@ -8,24 +8,27 @@ import { CreateDateColumn, UpdateDateColumn, Index, + Unique, } from 'typeorm'; import { Project } from './project'; import { QfRound } from './qfRound'; @Entity('project_qf_rounds_qf_round') @ObjectType() +@Unique(['projectId', 'qfRoundId']) export class ProjectQfRound extends BaseEntity { @Field(_type => ID) - @Column({ generated: 'increment' }) - @Index() + @PrimaryGeneratedColumn() id: number; @Field(_type => ID) - @PrimaryColumn() + @Column() + @Index() projectId: number; @Field(_type => ID) - @PrimaryColumn() + @Column() + @Index() qfRoundId: number; @ManyToOne(_type => Project, project => project.projectQfRoundRelations) diff --git a/src/entities/user.ts b/src/entities/user.ts index fe0d6ae99..ea120b637 100644 --- a/src/entities/user.ts +++ b/src/entities/user.ts @@ -45,7 +45,6 @@ export const publicSelectionFields = [ export enum UserRole { // Normal users, not admin RESTRICTED = 'restricted', - ADMIN = 'admin', OPERATOR = 'operator', VERIFICATION_FORM_REVIEWER = 'reviewer', @@ -120,6 +119,14 @@ export class User extends BaseEntity { @Column({ nullable: true }) url?: string; + @Field(_type => String, { nullable: true }) + @Column({ nullable: true }) + twitterName?: string; + + @Field(_type => String, { nullable: true }) + @Column({ nullable: true }) + telegramName?: string; + @Field(_type => Float, { nullable: true }) @Column({ type: 'real', nullable: true, default: null }) passportScore?: number; diff --git a/src/repositories/previousRoundRankRepository.ts b/src/repositories/previousRoundRankRepository.ts index b91d584b6..8b79e20f8 100644 --- 
a/src/repositories/previousRoundRankRepository.ts +++ b/src/repositories/previousRoundRankRepository.ts @@ -1,5 +1,6 @@ import { PowerSnapshot } from '../entities/powerSnapshot'; import { PreviousRoundRank } from '../entities/previousRoundRank'; +import { AppDataSource } from '../orm'; export const deleteAllPreviousRoundRanks = async () => { return PreviousRoundRank.query( @@ -10,17 +11,34 @@ export const deleteAllPreviousRoundRanks = async () => { }; export const copyProjectRanksToPreviousRoundRankTable = async () => { - await deleteAllPreviousRoundRanks(); - return PreviousRoundRank.query( - ` - INSERT INTO previous_round_rank ("projectId", round, rank) - SELECT DISTINCT project_power_view."projectId", project_power_view.round, project_power_view."powerRank" - FROM project_power_view - ON CONFLICT (round, "projectId") DO UPDATE SET - rank = EXCLUDED.rank, - "updatedAt" = NOW(); - `, - ); + // Use a transaction to prevent deadlocks + const queryRunner = AppDataSource.getDataSource().createQueryRunner(); + + await queryRunner.connect(); + await queryRunner.startTransaction(); + + try { + // Delete and insert in a single transaction to prevent deadlocks + await queryRunner.query(` + DELETE FROM previous_round_rank + `); + + await queryRunner.query(` + INSERT INTO previous_round_rank ("projectId", round, rank) + SELECT DISTINCT project_power_view."projectId", project_power_view.round, project_power_view."powerRank" + FROM project_power_view + ON CONFLICT (round, "projectId") DO UPDATE SET + rank = EXCLUDED.rank, + "updatedAt" = NOW() + `); + + await queryRunner.commitTransaction(); + } catch (error) { + await queryRunner.rollbackTransaction(); + throw error; + } finally { + await queryRunner.release(); + } }; export const projectsThatTheirRanksHaveChanged = async (): Promise< diff --git a/src/repositories/projectRepository.test.ts b/src/repositories/projectRepository.test.ts index aba5f10d7..3d4fb9155 100644 --- a/src/repositories/projectRepository.test.ts +++ 
b/src/repositories/projectRepository.test.ts @@ -1168,22 +1168,26 @@ function findQfRoundProjectsTestCases() { project2.qfRounds = [qfRound]; await project2.save(); - // Create ProjectQfRound entries with different donation amounts - const projectQfRound1 = ProjectQfRound.create({ - projectId: project1.id, - qfRoundId: qfRound.id, - sumDonationValueUsd: 100, - countUniqueDonors: 5, - }); - await projectQfRound1.save(); + // Upsert ProjectQfRound entries with different donation amounts + await ProjectQfRound.upsert( + { + projectId: project1.id, + qfRoundId: qfRound.id, + sumDonationValueUsd: 100, + countUniqueDonors: 5, + }, + ['projectId', 'qfRoundId'], + ); - const projectQfRound2 = ProjectQfRound.create({ - projectId: project2.id, - qfRoundId: qfRound.id, - sumDonationValueUsd: 500, - countUniqueDonors: 10, - }); - await projectQfRound2.save(); + await ProjectQfRound.upsert( + { + projectId: project2.id, + qfRoundId: qfRound.id, + sumDonationValueUsd: 500, + countUniqueDonors: 10, + }, + ['projectId', 'qfRoundId'], + ); // Test sorting by ActiveQfRoundRaisedFunds const [sortedProjects] = await findQfRoundProjects(qfRound.id, { diff --git a/src/repositories/qfRoundRepository.ts b/src/repositories/qfRoundRepository.ts index 73e6e4023..c036b8a12 100644 --- a/src/repositories/qfRoundRepository.ts +++ b/src/repositories/qfRoundRepository.ts @@ -117,6 +117,9 @@ export class QFArchivedRounds { @Field(_type => String, { nullable: true }) bannerMobile: string; + + @Field(_type => String, { nullable: true }) + hubCardImage: string; } export const findArchivedQfRounds = async ( @@ -150,6 +153,7 @@ export const findArchivedQfRounds = async ( .addSelect('qfRound.bannerBgImage', 'bannerBgImage') .addSelect('qfRound.bannerFull', 'bannerFull') .addSelect('qfRound.bannerMobile', 'bannerMobile') + .addSelect('qfRound.hubCardImage', 'hubCardImage') .addSelect( qb => qb @@ -183,9 +187,6 @@ export const findArchivedQfRounds = async ( .andWhere( 'donation.createdAt BETWEEN 
qfRound.beginDate AND qfRound.endDate', ) - .andWhere( - '(user.passportScore >= qfRound.minimumPassportScore OR user.passportScore IS NULL)', - ) .andWhere('sybil.id IS NULL') .andWhere('projectFraud.id IS NULL'), 'uniqueDonors', diff --git a/src/resolvers/donationResolver.ts b/src/resolvers/donationResolver.ts index 32e5f9d9f..c70d5e388 100644 --- a/src/resolvers/donationResolver.ts +++ b/src/resolvers/donationResolver.ts @@ -1,3 +1,5 @@ +import { Max, Min } from 'class-validator'; +import { GraphQLJSON } from 'graphql-scalars'; import { Arg, Args, @@ -13,36 +15,24 @@ import { registerEnumType, Resolver, } from 'type-graphql'; -import { GraphQLJSON } from 'graphql-scalars'; import { Service } from 'typedi'; -import { Max, Min } from 'class-validator'; import { Brackets, In, Repository } from 'typeorm'; import { Donation, DONATION_STATUS, SortField } from '../entities/donation'; -import { ApolloContext } from '../types/ApolloContext'; +import { + DRAFT_DONATION_STATUS, + DraftDonation, +} from '../entities/draftDonation'; +import { MainCategory } from '../entities/mainCategory'; +import { ORGANIZATION_LABELS } from '../entities/organization'; import { Project, ProjStatus } from '../entities/project'; +import { + SWAP_TRANSACTION_STATUS, + SwapTransaction, +} from '../entities/swapTransaction'; import { Token } from '../entities/token'; import { publicSelectionFields } from '../entities/user'; -import SentryLogger from '../sentryLogger'; -import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; +import { AppDataSource } from '../orm'; import { NETWORK_IDS } from '../provider'; -import { - getDonationToGivethWithDonationBoxMetrics, - isTokenAcceptableForProject, - syncDonationStatusWithBlockchainNetwork, - updateDonationPricesAndValues, -} from '../services/donationService'; -import { - createDonationQueryValidator, - getDonationsQueryValidator, - resourcePerDateReportValidator, - updateDonationQueryValidator, - validateWithJoiSchema, -} from 
'../utils/validators/graphqlQueryValidators'; -import { logger } from '../utils/logger'; -import { - findUserById, - setUserAsReferrer, -} from '../repositories/userRepository'; import { donationsNumberPerDateRange, donationsTotalAmountPerDateRange, @@ -57,30 +47,40 @@ import { newDonorsCount, newDonorsDonationTotalUsd, } from '../repositories/donationRepository'; -import { sleep } from '../utils/utils'; +import { markDraftDonationStatusMatched } from '../repositories/draftDonationRepository'; import { findProjectRecipientAddressByNetworkId } from '../repositories/projectAddressRepository'; -import { MainCategory } from '../entities/mainCategory'; import { findProjectById } from '../repositories/projectRepository'; -import { AppDataSource } from '../orm'; -import { getChainvineReferralInfoForDonation } from '../services/chainvineReferralService'; -import { relatedActiveQfRoundForProject } from '../services/qfRoundService'; import { findQfRoundById } from '../repositories/qfRoundRepository'; -import { detectAddressChainType } from '../utils/networks'; -import { ChainType } from '../types/network'; +import { nonZeroRecurringDonationsByProjectId } from '../repositories/recurringDonationRepository'; +import { + findUserById, + setUserAsReferrer, +} from '../repositories/userRepository'; +import SentryLogger from '../sentryLogger'; import { getAppropriateNetworkId } from '../services/chains'; -import { markDraftDonationStatusMatched } from '../repositories/draftDonationRepository'; +import { getChainvineReferralInfoForDonation } from '../services/chainvineReferralService'; import { - DRAFT_DONATION_STATUS, - DraftDonation, -} from '../entities/draftDonation'; -import { nonZeroRecurringDonationsByProjectId } from '../repositories/recurringDonationRepository'; -import { ORGANIZATION_LABELS } from '../entities/organization'; + getDonationToGivethWithDonationBoxMetrics, + isTokenAcceptableForProject, + syncDonationStatusWithBlockchainNetwork, + 
updateDonationPricesAndValues, +} from '../services/donationService'; import { getTokenPrice } from '../services/priceService'; +import { relatedActiveQfRoundForProject } from '../services/qfRoundService'; +import { ApolloContext } from '../types/ApolloContext'; +import { ChainType } from '../types/network'; +import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; +import { logger } from '../utils/logger'; +import { detectAddressChainType } from '../utils/networks'; import { findTokenByNetworkAndSymbol } from '../utils/tokenUtils'; +import { sleep } from '../utils/utils'; import { - SWAP_TRANSACTION_STATUS, - SwapTransaction, -} from '../entities/swapTransaction'; + createDonationQueryValidator, + getDonationsQueryValidator, + resourcePerDateReportValidator, + updateDonationQueryValidator, + validateWithJoiSchema, +} from '../utils/validators/graphqlQueryValidators'; const draftDonationEnabled = process.env.ENABLE_DRAFT_DONATION === 'true'; @@ -350,6 +350,7 @@ export class DonationResolver { .addSelect(publicSelectionFields) .leftJoinAndSelect('donation.project', 'project') .leftJoinAndSelect('donation.recurringDonation', 'recurringDonation') + .leftJoinAndSelect('donation.swapTransaction', 'swapTransaction') .leftJoinAndSelect('project.categories', 'categories') .leftJoin('project.projectPower', 'projectPower') .addSelect([ diff --git a/src/resolvers/projectResolver.test.ts b/src/resolvers/projectResolver.test.ts index c13624484..0af53191f 100644 --- a/src/resolvers/projectResolver.test.ts +++ b/src/resolvers/projectResolver.test.ts @@ -133,13 +133,29 @@ const createProjectQfRoundRelation = async ( sumDonationValueUsd: number = 0, countUniqueDonors: number = 0, ): Promise => { - const relation = ProjectQfRound.create({ - projectId, - qfRoundId, - sumDonationValueUsd, - countUniqueDonors, - }); - return await relation.save(); + // Check if relation already exists (might have been created by @ManyToMany) + let relation = await 
ProjectQfRound.findOne({ + where: { + projectId, + qfRoundId, + }, + }); + + if (relation) { + // Update existing relation + relation.sumDonationValueUsd = sumDonationValueUsd; + relation.countUniqueDonors = countUniqueDonors; + return await relation.save(); + } else { + // Create new relation + relation = ProjectQfRound.create({ + projectId, + qfRoundId, + sumDonationValueUsd, + countUniqueDonors, + }); + return await relation.save(); + } }; describe('createProject test cases --->', createProjectTestCases); @@ -6791,6 +6807,7 @@ function qfProjectsTestCases() { }); activeProject.qfRounds = [qfRound]; await activeProject.save(); + await createProjectQfRoundRelation(activeProject.id, qfRound.id, 0, 0); // Create a cancelled project (should not be returned) const cancelledProject = await saveProjectDirectlyToDb({ @@ -6803,6 +6820,7 @@ function qfProjectsTestCases() { }); cancelledProject.qfRounds = [qfRound]; await cancelledProject.save(); + await createProjectQfRoundRelation(cancelledProject.id, qfRound.id, 0, 0); // Create a not reviewed project (should not be returned) const notReviewedProject = await saveProjectDirectlyToDb({ @@ -6815,6 +6833,7 @@ function qfProjectsTestCases() { }); notReviewedProject.qfRounds = [qfRound]; await notReviewedProject.save(); + await createProjectQfRoundRelation(notReviewedProject.id, qfRound.id, 0, 0); // Test the query const result = await axios.post(graphqlUrl, { @@ -6946,6 +6965,7 @@ function qfProjectsTestCases() { }); project1.qfRounds = [qfRound]; await project1.save(); + await createProjectQfRoundRelation(project1.id, qfRound.id, 0, 0); const project2 = await saveProjectDirectlyToDb({ ...createProjectData(), @@ -6960,6 +6980,7 @@ function qfProjectsTestCases() { }); project2.qfRounds = [qfRound]; await project2.save(); + await createProjectQfRoundRelation(project2.id, qfRound.id, 0, 0); const project3 = await saveProjectDirectlyToDb({ ...createProjectData(), @@ -6974,6 +6995,7 @@ function qfProjectsTestCases() { }); 
project3.qfRounds = [qfRound]; await project3.save(); + await createProjectQfRoundRelation(project3.id, qfRound.id, 0, 0); // Test pagination const paginationResult = await axios.post(graphqlUrl, { diff --git a/src/resolvers/userResolver.test.ts b/src/resolvers/userResolver.test.ts index 643299a92..fded337c0 100644 --- a/src/resolvers/userResolver.test.ts +++ b/src/resolvers/userResolver.test.ts @@ -854,6 +854,34 @@ function updateUserTestCases() { assert.equal(updatedUser?.name, updateUserData.firstName + ' ' + lastName); assert.equal(updatedUser?.lastName, lastName); }); + + it('should update user with twitter and telegram usernames', async () => { + const user = await saveUserDirectlyToDb(generateRandomEtheriumAddress()); + const accessToken = await generateTestAccessToken(user.id); + const updateUserData = { + firstName: 'firstName', + lastName: 'lastName', + email: user.email, + twitterName: 'twusername', + telegramName: 'tgusername', + }; + const result = await axios.post( + graphqlUrl, + { + query: updateUser, + variables: updateUserData, + }, + { + headers: { + Authorization: `Bearer ${accessToken}`, + }, + }, + ); + assert.isTrue(result.data.data.updateUser); + const updatedUser = await User.findOne({ where: { id: user.id } }); + assert.equal(updatedUser?.twitterName, updateUserData.twitterName); + assert.equal(updatedUser?.telegramName, updateUserData.telegramName); + }); } function userEmailVerification() { diff --git a/src/resolvers/userResolver.ts b/src/resolvers/userResolver.ts index afe6b8af9..45fa4c177 100644 --- a/src/resolvers/userResolver.ts +++ b/src/resolvers/userResolver.ts @@ -9,31 +9,34 @@ import { } from 'type-graphql'; import { Repository } from 'typeorm'; -import { User } from '../entities/user'; -import { AccountVerificationInput } from './types/accountVerificationInput'; -import { ApolloContext } from '../types/ApolloContext'; -import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; -import { validateEmail } from 
'../utils/validators/commonValidators'; -import { - findUserById, - findUserByWalletAddress, - isValidEmail, -} from '../repositories/userRepository'; -import { createNewAccountVerification } from '../repositories/accountVerificationRepository'; -import { UserByAddressResponse } from './types/userResolver'; -import { AppDataSource } from '../orm'; import { getGitcoinAdapter, getNotificationAdapter, } from '../adapters/adaptersFactory'; -import { logger } from '../utils/logger'; -import { isWalletAddressInPurpleList } from '../repositories/projectAddressRepository'; -import { addressHasDonated } from '../repositories/donationRepository'; import { getOrttoPersonAttributes } from '../adapters/notifications/NotificationCenterAdapter'; +import { User } from '../entities/user'; +import { AppDataSource } from '../orm'; +import { redis } from '../redis'; +import { createNewAccountVerification } from '../repositories/accountVerificationRepository'; +import { addressHasDonated } from '../repositories/donationRepository'; +import { isWalletAddressInPurpleList } from '../repositories/projectAddressRepository'; import { retrieveActiveQfRoundUserMBDScore } from '../repositories/qfRoundRepository'; +import { + createUserWithPublicAddress, + findUserById, + findUserByWalletAddress, + isValidEmail, +} from '../repositories/userRepository'; import { getLoggedInUser } from '../services/authorizationServices'; -import { generateRandomNumericCode } from '../utils/utils'; +import { syncNewImpactGraphUserToV6Core } from '../services/v6CoreUserSync'; +import { ApolloContext } from '../types/ApolloContext'; +import { i18n, translationErrorMessagesKeys } from '../utils/errorMessages'; +import { logger } from '../utils/logger'; import { isSolanaAddress } from '../utils/networks'; +import { generateRandomNumericCode } from '../utils/utils'; +import { validateEmail } from '../utils/validators/commonValidators'; +import { AccountVerificationInput } from './types/accountVerificationInput'; 
+import { UserByAddressResponse } from './types/userResolver'; @ObjectType() class UserRelatedAddressResponse { @@ -44,6 +47,18 @@ class UserRelatedAddressResponse { hasDonated: boolean; } +@ObjectType() +class CreateUserByAddressResponse { + @Field(_type => User) + user: User; + + @Field(_type => Boolean) + existing: boolean; + + @Field(_type => String, { nullable: true }) + errorMessage?: string; +} + @Resolver(_of => User) export class UserResolver { constructor(private readonly userRepository: Repository) { @@ -62,6 +77,69 @@ export class UserResolver { }; } + /** + * Public (no JWT) query to check whether a user exists for a given wallet address. + */ + @Query(_returns => Boolean) + async userExistsByAddress(@Arg('address') address: string): Promise { + const found = await User.createQueryBuilder('user') + .select(['user.id']) + .where(`LOWER("walletAddress") = :walletAddress`, { + walletAddress: address.toLowerCase(), + }) + .getOne(); + return Boolean(found); + } + + /** + * Public (no JWT) mutation to create a new user for a given wallet address. + * If the user already exists, it returns the existing user and does NOT trigger the webhook. + * + * After successful creation, it triggers a webhook to v6-core (password in header; values from env vars). 
+ */ + @Mutation(_returns => CreateUserByAddressResponse) + async createUserByAddress( + @Arg('address') address: string, + @Ctx() ctx: ApolloContext, + ): Promise { + // Simple rate limit: 20/day per IP + const requesterIp = + ctx?.expressReq?.ip || + (ctx?.expressReq?.headers?.['x-forwarded-for'] as string | undefined) || + 'unknown'; + const dayKey = new Date().toISOString().slice(0, 10); // YYYY-MM-DD + const rateLimitKey = `rl:createUserByAddress:${requesterIp}:${dayKey}`; + + const current = await redis.incr(rateLimitKey); + if (current === 1) { + // expire in 24h; "per day" approximation + await redis.expire(rateLimitKey, 24 * 60 * 60); + } + if (current > 20) { + throw new Error('Rate limit exceeded (20 per day)'); + } + + const existing = await User.createQueryBuilder('user') + .where(`LOWER("walletAddress") = :walletAddress`, { + walletAddress: address.toLowerCase(), + }) + .getOne(); + if (existing) return { user: existing, existing: true }; + + const createdUser = await createUserWithPublicAddress(address); + let errorMessage = ''; + + try { + await syncNewImpactGraphUserToV6Core(createdUser); + } catch (e) { + // Do not fail user creation if the downstream sync fails, but log it for investigation. 
+ logger.error('createUserByAddress() v6-core sync failed', e); + errorMessage = (e as Error).message || JSON.stringify(e); + } + + return { user: createdUser, existing: false, errorMessage }; + } + @Query(_returns => UserByAddressResponse, { nullable: true }) async userByAddress( @Arg('address', _type => String) address: string, @@ -89,10 +167,18 @@ export class UserResolver { ) { const includeSensitiveFields = user?.walletAddress?.toLowerCase() === address.toLowerCase(); - const foundUser = await findUserByWalletAddress( - address, - includeSensitiveFields, + const query = User.createQueryBuilder('user').where( + `LOWER("walletAddress") = :walletAddress`, + { + walletAddress: address.toLowerCase(), + }, ); + if (!includeSensitiveFields) { + // keep the same behavior as repository: no sensitive fields when caller isn't the owner + const { publicSelectionFields } = await import('../entities/user'); + query.select(publicSelectionFields); + } + const foundUser = await query.getOne(); if (!foundUser) return; @@ -137,6 +223,8 @@ export class UserResolver { @Arg('email', { nullable: true }) email: string, @Arg('url', { nullable: true }) url: string, @Arg('avatar', { nullable: true }) avatar: string, + @Arg('twitterName', { nullable: true }) twitterName: string, + @Arg('telegramName', { nullable: true }) telegramName: string, @Arg('newUser', { nullable: true }) newUser: boolean, @Ctx() { req: { user } }: ApolloContext, ): Promise { @@ -200,6 +288,12 @@ export class UserResolver { if (avatar !== undefined) { dbUser.avatar = avatar; } + if (twitterName !== undefined) { + dbUser.twitterName = twitterName; + } + if (telegramName !== undefined) { + dbUser.telegramName = telegramName; + } dbUser.name = `${dbUser.firstName || ''} ${dbUser.lastName || ''}`.trim(); await dbUser.save(); diff --git a/src/server/adminJs/tabs/donationTab.ts b/src/server/adminJs/tabs/donationTab.ts index 8533abb2f..e19af8ff7 100644 --- a/src/server/adminJs/tabs/donationTab.ts +++ 
b/src/server/adminJs/tabs/donationTab.ts @@ -10,9 +10,11 @@ import { import { Project } from '../../../entities/project'; import { Token } from '../../../entities/token'; import { NETWORK_IDS } from '../../../provider'; +import { findQfRoundById } from '../../../repositories/qfRoundRepository'; import { findUserByWalletAddress } from '../../../repositories/userRepository'; import { getTwitterDonations } from '../../../services/Idriss/contractDonations'; import { + getTransactionInfoFromNetwork, NetworkTransactionInfo, TransactionDetailInput, } from '../../../services/chains'; @@ -32,6 +34,7 @@ import { updateUserTotalDonated, updateUserTotalReceived, } from '../../../services/userService'; +import { ChainType } from '../../../types/network'; import { i18n, translationErrorMessagesKeys, @@ -67,7 +70,14 @@ export const createDonation = async (request: AdminJsRequestInterface) => { qfRoundId, anonymous, chainType, + fromWalletAddress, + toWalletAddress, + amount, + timestamp, + toWalletMemo, } = request?.payload || {}; + + // Validate required fields if ( !txHash || !transactionNetworkId || @@ -95,6 +105,39 @@ export const createDonation = async (request: AdminJsRequestInterface) => { }, }; } + + // Validate required fields for non-EVM chains + if (chainType !== ChainType.EVM) { + const errors: Record = {}; + if (!fromWalletAddress) + errors.fromWalletAddress = { + message: 'fromWalletAddress is required for non-EVM chains', + }; + if (!toWalletAddress) + errors.toWalletAddress = { + message: 'toWalletAddress is required for non-EVM chains', + }; + if (!amount) + errors.amount = { message: 'amount is required for non-EVM chains' }; + if (!timestamp) + errors.timestamp = { + message: 'timestamp is required for non-EVM chains', + }; + + if (Object.keys(errors).length > 0) { + return { + record: { + params: request?.payload || {}, + errors, + }, + notice: { + message: 'Please fix the highlighted fields for non-EVM donation', + type: 'danger', + }, + }; + } + } + const 
networkId = Number(transactionNetworkId); let transactions: NetworkTransactionInfo[] = []; let donationType; @@ -120,53 +163,158 @@ export const createDonation = async (request: AdminJsRequestInterface) => { }; } + // Handle different transaction types if (txType === 'csvAirDrop') { - // transactions = await getDisperseTransactions(txHash, networkId); transactions = await getCsvAirdropTransactions(txHash, networkId); donationType = DONATION_TYPES.CSV_AIR_DROP; } else if (txType === 'gnosisSafe') { - // transactions = await getDisperseTransactions(txHash, networkId); transactions = await getGnosisSafeTransactions(txHash, networkId); donationType = DONATION_TYPES.GNOSIS_SAFE; } else { - const txInfo = await findEvmTransactionByHash({ - networkId, - txHash, - symbol: currency, - } as TransactionDetailInput); - if (!txInfo) { + // Handle both EVM and non-EVM transactions + if (chainType === ChainType.EVM) { + try { + const txInfo = await findEvmTransactionByHash({ + networkId, + txHash, + symbol: currency, + } as TransactionDetailInput); + if (!txInfo) { + return { + record: { + params: request?.payload || {}, + errors: { + transactionId: { + message: 'Transaction not found on blockchain', + }, + }, + }, + notice: { + message: 'Transaction not found on blockchain', + type: 'danger', + }, + }; + } + transactions.push(txInfo); + } catch (error) { + const errorMessage = + error instanceof Error ? 
error.message : String(error); + return { + record: { + params: request?.payload || {}, + errors: { + currency: { + message: `Token not found or chain not supported by project: ${errorMessage}`, + }, + transactionNetworkId: { + message: `Token not found or chain not supported by project: ${errorMessage}`, + }, + }, + }, + notice: { + message: `Token not found or chain not supported by project: ${errorMessage}`, + type: 'danger', + }, + }; + } + } else { + // Non-EVM chain - validate transaction using appropriate service + try { + const txInfo = await getTransactionInfoFromNetwork({ + txHash, + symbol: currency, + networkId, + fromAddress: fromWalletAddress, + toAddress: toWalletAddress, + amount: Number(amount), + timestamp: Number(timestamp), + chainType, + } as TransactionDetailInput); + transactions.push(txInfo); + } catch (error) { + const errorMessage = + error instanceof Error ? error.message : String(error); + return { + record: { + params: request?.payload || {}, + errors: { + transactionId: { + message: `Transaction validation failed: ${errorMessage}`, + }, + }, + }, + notice: { + message: `Transaction validation failed: ${errorMessage}`, + type: 'danger', + }, + }; + } + } + } + + // Validate QF Round if provided + let qfRound: any = null; + if (qfRoundId) { + qfRound = await findQfRoundById(Number(qfRoundId)); + if (!qfRound) { return { record: { params: request?.payload || {}, errors: { - transactionId: { message: 'Transaction not found on blockchain' }, + qfRoundId: { message: 'QF Round not found' }, }, }, notice: { - message: 'Transaction not found on blockchain', + message: 'QF Round not found', type: 'danger', }, }; } - transactions.push(txInfo); } for (const transactionInfo of transactions) { - // const project = await Project.findOne({ - // walletAddress: transactionInfo?.to, - // }); - const project = await Project.createQueryBuilder('project') - .where(`lower("walletAddress")=lower(:address)`, { - address: transactionInfo?.to, - }) + const 
projectQuery = Project.createQueryBuilder('project') + .innerJoinAndSelect('project.addresses', 'projectAddress') .leftJoinAndSelect('project.organization', 'organization') - .getOne(); + .leftJoinAndSelect('project.qfRounds', 'qfRounds') + .where('projectAddress.isRecipient = true') + .andWhere('projectAddress.networkId = :networkId', { networkId }); + + switch (chainType) { + case ChainType.SOLANA: + projectQuery.andWhere('projectAddress.address = :address', { + address: transactionInfo?.to, + }); + break; + case ChainType.STELLAR: + projectQuery.andWhere( + 'UPPER(projectAddress.address) = UPPER(:address)', + { + address: transactionInfo?.to, + }, + ); + break; + default: + projectQuery.andWhere( + 'LOWER(projectAddress.address) = LOWER(:address)', + { + address: transactionInfo?.to, + }, + ); + } + + const project = await projectQuery.getOne(); if (!project) { if (transactions.length === 1) { return { record: { params: request?.payload || {}, + errors: { + toWalletAddress: { + message: 'Project not found', + }, + }, }, notice: { message: 'Project not found', @@ -188,6 +336,131 @@ export const createDonation = async (request: AdminJsRequestInterface) => { continue; } + // Check memo for Stellar transactions + if (chainType === ChainType.STELLAR) { + const projectAddress = project.addresses?.find( + addr => + addr.isRecipient && + addr.networkId === networkId && + addr.address.toUpperCase() === transactionInfo?.to?.toUpperCase(), + ); + + const projectMemo = projectAddress?.memo; + const memoMatches = + (toWalletMemo && projectMemo === toWalletMemo) || + (!toWalletMemo && !projectMemo); + + if (!memoMatches) { + if (transactions.length === 1) { + return { + record: { + params: request?.payload || {}, + errors: { + toWalletMemo: { + message: 'Wrong memo for this project', + }, + }, + }, + notice: { + message: 'Wrong memo for this project', + type: 'danger', + }, + }; + } + logger.error('Creating donation by adminJs error - Wrong memo', { + hash: txHash, + 
toAddress: transactionInfo?.to, + networkId, + expectedMemo: projectMemo, + providedMemo: toWalletMemo, + }); + continue; + } + } + + // Validate QF Round conditions + if (qfRound) { + // a) Check if project is part of the QF round + const isProjectInQfRound = project.qfRounds?.some( + qr => qr.id === qfRound!.id, + ); + if (!isProjectInQfRound) { + return { + record: { + params: request?.payload || {}, + errors: { + qfRoundId: { + message: 'Project is not part of this QF round', + }, + }, + }, + notice: { + message: 'Project is not part of this QF round', + type: 'danger', + }, + }; + } + + // b) Check if donation is on QF round eligible network + if (!qfRound.isEligibleNetwork(networkId)) { + return { + record: { + params: request?.payload || {}, + errors: { + transactionNetworkId: { + message: `Network ${networkId} is not eligible for this QF round. Eligible networks: ${qfRound.eligibleNetworks.join(', ') || 'all networks'}`, + }, + }, + }, + notice: { + message: `Network ${networkId} is not eligible for this QF round`, + type: 'danger', + }, + }; + } + + // c) Check if donation timestamp is between QF start and end date + const donationDate = new Date(transactionInfo?.timestamp * 1000); + if ( + donationDate < qfRound.beginDate || + donationDate > qfRound.endDate + ) { + return { + record: { + params: request?.payload || {}, + errors: { + timestamp: { + message: `Donation timestamp (${donationDate.toISOString()}) is not within QF round period (${qfRound.beginDate.toISOString()} - ${qfRound.endDate.toISOString()})`, + }, + }, + }, + notice: { + message: 'Donation timestamp is not within QF round period', + type: 'danger', + }, + }; + } + } + + // 2. 
Check if donor is donating to their own project + const donor = await findUserByWalletAddress(transactionInfo?.from); + if (donor && project.adminUserId === donor.id) { + return { + record: { + params: request?.payload || {}, + errors: { + fromWalletAddress: { + message: "Donor can't donate to his/her own project", + }, + }, + }, + notice: { + message: "Donor can't donate to his/her own project", + type: 'danger', + }, + }; + } + const tokenInDb = await Token.findOne({ where: { networkId, @@ -260,12 +533,16 @@ export const createDonation = async (request: AdminJsRequestInterface) => { status: DONATION_STATUS.VERIFIED, isProjectGivbackEligible: project.isGivbackEligible, donationType, - createdAt: new Date(transactionInfo?.timestamp * 1000), + isQRDonation: chainType === ChainType.STELLAR, + createdAt: + chainType === ChainType.STELLAR + ? new Date(transactionInfo?.timestamp) + : new Date(transactionInfo?.timestamp * 1000), anonymous, isTokenEligibleForGivback, qfRoundId: qfRoundId ? Number(qfRoundId) : undefined, + toWalletMemo: toWalletMemo || undefined, }); - const donor = await findUserByWalletAddress(transactionInfo?.from); if (donor) { donation.anonymous = false; donation.user = donor; @@ -693,6 +970,15 @@ export const donationTab = { new: true, }, }, + isQRDonation: { + isVisible: { + list: false, + filter: false, + show: true, + edit: false, + new: false, + }, + }, userId: { isVisible: { list: true, @@ -716,18 +1002,20 @@ export const donationTab = { list: true, filter: true, show: true, - edit: false, + edit: true, new: true, }, + description: 'Required for non-EVM chains only', }, toWalletAddress: { isVisible: { list: false, filter: true, show: true, - edit: false, + edit: true, new: true, }, + description: 'Required for non-EVM chains only', }, amount: { type: 'number', @@ -735,9 +1023,10 @@ export const donationTab = { list: false, filter: false, show: true, - edit: false, - new: false, + edit: true, + new: true, }, + description: 'Required for non-EVM 
chains only', }, distributedFundQfRoundId: { isVisible: false, @@ -806,7 +1095,7 @@ export const donationTab = { availableValues: [ { value: NETWORK_IDS.MAIN_NET, label: 'Mainnet' }, { value: NETWORK_IDS.XDAI, label: 'Xdai' }, - { value: NETWORK_IDS.SEPOLIA, label: 'sepolia' }, + { value: NETWORK_IDS.SEPOLIA, label: 'Sepolia' }, { value: NETWORK_IDS.POLYGON, label: 'Polygon' }, { value: NETWORK_IDS.CELO, label: 'Celo' }, { value: NETWORK_IDS.CELO_ALFAJORES, label: 'Alfajores' }, @@ -816,6 +1105,14 @@ export const donationTab = { { value: NETWORK_IDS.BASE_SEPOLIA, label: 'Base Sepolia' }, { value: NETWORK_IDS.ZKEVM_MAINNET, label: 'ZKEVM Mainnet' }, { value: NETWORK_IDS.ZKEVM_CARDONA, label: 'ZKEVM Cardona' }, + { value: NETWORK_IDS.OPTIMISTIC, label: 'Optimistic' }, + { value: NETWORK_IDS.OPTIMISM_SEPOLIA, label: 'Optimism Sepolia' }, + { value: NETWORK_IDS.STELLAR_MAINNET, label: 'Stellar Mainnet' }, + { value: NETWORK_IDS.SOLANA_MAINNET, label: 'Solana Mainnet' }, + { value: NETWORK_IDS.SOLANA_TESTNET, label: 'Solana Testnet' }, + { value: NETWORK_IDS.SOLANA_DEVNET, label: 'Solana Devnet' }, + { value: NETWORK_IDS.CARDANO_MAINNET, label: 'Cardano Mainnet' }, + { value: NETWORK_IDS.CARDANO_PREPROD, label: 'Cardano Preprod' }, ], isVisible: { list: true, @@ -855,6 +1152,14 @@ export const donationTab = { edit: false, }, }, + qfRoundErrorMessage: { + isVisible: { + list: false, + filter: false, + show: false, + edit: false, + }, + }, swapTransaction: { isVisible: { list: false, @@ -869,10 +1174,46 @@ export const donationTab = { filter: false, show: true, edit: true, - new: false, + new: true, }, type: 'number', }, + chainType: { + availableValues: [ + { value: ChainType.EVM, label: 'EVM' }, + { value: ChainType.SOLANA, label: 'Solana' }, + { value: ChainType.STELLAR, label: 'Stellar' }, + { value: ChainType.CARDANO, label: 'Cardano' }, + ], + isVisible: { + filter: false, + list: true, + show: true, + new: true, + edit: true, + }, + }, + timestamp: { + isVisible: { 
+ list: false, + filter: false, + show: false, + edit: true, + new: true, + }, + type: 'number', + description: 'Unix Timestamp in seconds non-EVM only', + }, + toWalletMemo: { + isVisible: { + list: false, + filter: false, + show: true, + edit: true, + new: true, + }, + description: 'Optional memo for Stellar transactions', + }, transactionId: { isVisible: { list: true, diff --git a/src/server/adminJs/tabs/projectQfRoundsTab.ts b/src/server/adminJs/tabs/projectQfRoundsTab.ts index ede87b322..45bb3e5b6 100644 --- a/src/server/adminJs/tabs/projectQfRoundsTab.ts +++ b/src/server/adminJs/tabs/projectQfRoundsTab.ts @@ -1,14 +1,18 @@ +import { ActionResponse } from 'adminjs'; import { RecordJSON } from 'adminjs/src/frontend/interfaces/record-json.interface'; import { ProjectQfRound } from '../../../entities/projectQfRound'; +import { findProjectById } from '../../../repositories/projectRepository'; +import { updateProjectStatistics } from '../../../services/projectService'; +import { updateUserTotalReceived } from '../../../services/userService'; +import { logger } from '../../../utils/logger'; +import { + AdminJsContextInterface, + AdminJsRequestInterface, +} from '../adminJs-types'; import { canAccessProjectQfRoundAction, ResourceActions, } from '../adminJsPermissions'; -import { - AdminJsRequestInterface, - AdminJsContextInterface, -} from '../adminJs-types'; -import { logger } from '../../../utils/logger'; const deleteProjectQfRound = async ( request: AdminJsRequestInterface, @@ -95,6 +99,58 @@ const deleteProjectQfRound = async ( }; }; +/** + * Update project and project user statistics after ProjectQfRound creation + * + * MAIN PURPOSE: to update the statistics for a given project and QF round when project has been added to a QF round + * + * @param response ActionResponse + * @returns Promise + */ +const afterCreateUpdateStatistics = async ( + response: ActionResponse, +): Promise => { + const record = response.record; + if (!record) return response; + + try { + 
const projectId = Number(record.params?.projectId); + const qfRoundId = Number(record.params?.qfRoundId); + + if (projectId && qfRoundId) { + logger.debug('Updating statistics after ProjectQfRound creation:', { + projectId, + qfRoundId, + }); + + // Update statistics for this project-round combination + await updateProjectStatistics(projectId, qfRoundId); + + // Update project user statistics + const project = await findProjectById(projectId); + if (project?.adminUser?.id) { + await updateUserTotalReceived(project.adminUser.id); + } + + logger.info( + 'Statistics updated successfully after ProjectQfRound creation:', + { + projectId, + qfRoundId, + }, + ); + } + } catch (error) { + logger.error('Error updating statistics after ProjectQfRound creation:', { + error: error.message, + recordParams: record.params, + }); + // Don't fail the creation, just log the error + } + + return response; +}; + export const projectQfRoundsTab = { resource: ProjectQfRound, options: { @@ -108,23 +164,18 @@ export const projectQfRoundsTab = { canAccessProjectQfRoundAction({ currentAdmin }, ResourceActions.SHOW), }, delete: { - isVisible: true, + isVisible: false, // Disabled - Project QF Rounds are now managed in v6-core admin panel + isAccessible: false, handler: deleteProjectQfRound, - isAccessible: ({ currentAdmin }) => - canAccessProjectQfRoundAction( - { currentAdmin }, - ResourceActions.DELETE, - ), }, new: { - isVisible: true, - isAccessible: ({ currentAdmin }) => - canAccessProjectQfRoundAction({ currentAdmin }, ResourceActions.NEW), + isVisible: false, // Disabled - Project QF Rounds are now managed in v6-core admin panel + isAccessible: false, + after: afterCreateUpdateStatistics, }, edit: { - isVisible: true, - isAccessible: ({ currentAdmin }) => - canAccessProjectQfRoundAction({ currentAdmin }, ResourceActions.EDIT), + isVisible: false, // Disabled - Project QF Rounds are now managed in v6-core admin panel + isAccessible: false, }, bulkDelete: { isVisible: false, diff --git 
a/src/server/adminJs/tabs/projectsTab.ts b/src/server/adminJs/tabs/projectsTab.ts index 8358bfe4b..e3e3a6ef5 100644 --- a/src/server/adminJs/tabs/projectsTab.ts +++ b/src/server/adminJs/tabs/projectsTab.ts @@ -54,6 +54,8 @@ import { User } from '../../../entities/user'; import { extractAdminJsReferrerUrlParams } from '../adminJs'; import { Category } from '../../../entities/category'; import { getRedirectUrl } from '../adminJsUtils'; +import { ProjectVerificationForm } from '../../../entities/projectVerificationForm'; +import { updateUserTotalReceived } from '../../../services/userService'; // add queries depending on which filters were selected export const buildProjectsQuery = ( @@ -1112,6 +1114,7 @@ export const projectsTab = { const project = await Project.findOne({ where: { id: request?.record?.id }, }); + const oldProjectAdminUserId = project?.adminUserId; if (project) { if (request?.record?.params?.adminChanged) { const adminUser = await User.findOne({ @@ -1119,6 +1122,24 @@ export const projectsTab = { }); project.adminUser = adminUser!; await project.save(); + + // Update project verification form owner if it has been changed + const projectVerificationForm = + await ProjectVerificationForm.findOne({ + where: { projectId: project.id }, + }); + if (projectVerificationForm) { + projectVerificationForm.user = adminUser!; + await projectVerificationForm.save(); + } + + // Update new project user totalDonated and totalReceived + await updateUserTotalReceived(project.adminUserId); + + // Update old project user totalDonated and totalReceived + if (oldProjectAdminUserId) { + await updateUserTotalReceived(oldProjectAdminUserId); + } } // Not required for now // Project.notifySegment(project, SegmentEvents.PROJECT_EDITED); diff --git a/src/server/adminJs/tabs/qfRoundTab.ts b/src/server/adminJs/tabs/qfRoundTab.ts index a11ebbee3..c50659b82 100644 --- a/src/server/adminJs/tabs/qfRoundTab.ts +++ b/src/server/adminJs/tabs/qfRoundTab.ts @@ -1,36 +1,36 @@ import fs from 
'fs'; +import adminJs, { ValidationError } from 'adminjs'; import { ActionResponse, After, } from 'adminjs/src/backend/actions/action.interface'; -import adminJs, { ValidationError } from 'adminjs'; import { RecordJSON } from 'adminjs/src/frontend/interfaces/record-json.interface'; +import config from '../../../config'; import { QfRound } from '../../../entities/qfRound'; -import { canAccessQfRoundAction, ResourceActions } from '../adminJsPermissions'; +import { pinFile } from '../../../middleware/pinataUtils'; +import { AppDataSource } from '../../../ormconfig'; +import { NETWORK_IDS } from '../../../provider'; +import { + countActiveQfRounds, + findQfRoundById, + getRelatedProjectsOfQfRound, +} from '../../../repositories/qfRoundRepository'; +import { relateManyProjectsToQfRound } from '../../../repositories/qfRoundRepository2'; +import { addQfRoundDonationsSheetToSpreadsheet } from '../../../services/googleSheets'; import { getQfRoundActualDonationDetails, refreshProjectActualMatchingView, refreshProjectEstimatedMatchingView, } from '../../../services/projectViewsService'; +import { isQfRoundHasEnded } from '../../../services/qfRoundService'; +import { errorMessages } from '../../../utils/errorMessages'; +import { logger } from '../../../utils/logger'; +import { messages } from '../../../utils/messages'; import { AdminJsContextInterface, AdminJsRequestInterface, } from '../adminJs-types'; -import { isQfRoundHasEnded } from '../../../services/qfRoundService'; -import { - findQfRoundById, - getRelatedProjectsOfQfRound, -} from '../../../repositories/qfRoundRepository'; -import { NETWORK_IDS } from '../../../provider'; -import { logger } from '../../../utils/logger'; -import { messages } from '../../../utils/messages'; -import { addQfRoundDonationsSheetToSpreadsheet } from '../../../services/googleSheets'; -import { errorMessages } from '../../../utils/errorMessages'; -import { relateManyProjectsToQfRound } from '../../../repositories/qfRoundRepository2'; -import { 
pinFile } from '../../../middleware/pinataUtils'; -import { AppDataSource } from '../../../ormconfig'; -import { countActiveQfRounds } from '../../../repositories/qfRoundRepository'; -import config from '../../../config'; +import { canAccessQfRoundAction, ResourceActions } from '../adminJsPermissions'; let initialProjectIds: number[] = []; @@ -480,16 +480,15 @@ export const qfRoundTab = { actions: { delete: { isVisible: false, - isAccessible: ({ currentAdmin }) => - canAccessQfRoundAction({ currentAdmin }, ResourceActions.DELETE), + isAccessible: false, // Disabled - QF Rounds are now managed in v6-core admin panel after: refreshMaterializedViews, }, bulkDelete: { isVisible: false, }, new: { - isAccessible: ({ currentAdmin }) => - canAccessQfRoundAction({ currentAdmin }, ResourceActions.NEW), + isAccessible: false, // Disabled - QF Rounds are now managed in v6-core admin panel + isVisible: false, handler: async ( request: AdminJsRequestInterface, _response, @@ -537,8 +536,24 @@ export const qfRoundTab = { await handleBannerMobile(request.payload); await handleHubCardImage(request.payload); + // Process array fields properly (AdminJS sometimes sends them as indexed properties even in NEW) + const processedPayload: any = {}; + + Object.keys(request.payload).forEach(key => { + // Handle eligibleNetworks array + if (key.startsWith('eligibleNetworks.')) { + if (!processedPayload.eligibleNetworks) { + processedPayload.eligibleNetworks = []; + } + const index = parseInt(key.split('.')[1]); + processedPayload.eligibleNetworks[index] = request.payload[key]; + } else { + processedPayload[key] = request.payload[key]; + } + }); + // Create the record - const qfRound = QfRound.create(request.payload); + const qfRound = QfRound.create(processedPayload); record = await qfRound.save(); } catch (error) { logger.error('Error creating QF Round:', error); @@ -574,8 +589,8 @@ export const qfRoundTab = { after: fillProjects, }, edit: { - isAccessible: ({ currentAdmin }) => - 
canAccessQfRoundAction({ currentAdmin }, ResourceActions.EDIT), + isAccessible: false, // Disabled - QF Rounds are now managed in v6-core admin panel + isVisible: false, handler: async ( request: AdminJsRequestInterface, response, diff --git a/src/server/bootstrap.ts b/src/server/bootstrap.ts index 3923d2cf0..2bb9552d7 100644 --- a/src/server/bootstrap.ts +++ b/src/server/bootstrap.ts @@ -1,76 +1,76 @@ // @ts-check -import path from 'path'; import http from 'http'; -import { rateLimit } from 'express-rate-limit'; -import { RedisStore } from 'rate-limit-redis'; +import path from 'path'; +import { Resource } from '@adminjs/typeorm'; import { ApolloServer } from '@apollo/server'; +import { ApolloServerPluginLandingPageGraphQLPlayground } from '@apollo/server-plugin-landing-page-graphql-playground'; +import { ApolloServerErrorCode } from '@apollo/server/errors'; import { expressMiddleware } from '@apollo/server/express4'; +import { ApolloServerPluginLandingPageDisabled } from '@apollo/server/plugin/disabled'; import { ApolloServerPluginSchemaReporting } from '@apollo/server/plugin/schemaReporting'; -import { ApolloServerPluginLandingPageGraphQLPlayground } from '@apollo/server-plugin-landing-page-graphql-playground'; -import express, { json, Request, Response } from 'express'; -import { Container } from 'typedi'; -import { Resource } from '@adminjs/typeorm'; +import bodyParser from 'body-parser'; import { validate } from 'class-validator'; -import { ModuleThread, Pool, spawn, Worker } from 'threads'; -import { DataSource } from 'typeorm'; import cors from 'cors'; -import bodyParser from 'body-parser'; +import express, { json, Request, Response } from 'express'; +import { rateLimit } from 'express-rate-limit'; import graphqlUploadExpress from 'graphql-upload/graphqlUploadExpress.js'; -import { ApolloServerPluginLandingPageDisabled } from '@apollo/server/plugin/disabled'; -import { ApolloServerErrorCode } from '@apollo/server/errors'; +import { RedisStore } from 
'rate-limit-redis'; +import { ModuleThread, Pool, spawn, Worker } from 'threads'; +import { Container } from 'typedi'; +import { DataSource } from 'typeorm'; import config from '../config'; +import SentryLogger from '../sentryLogger'; import { handleStripeWebhook } from '../utils/stripe'; import createSchema from './createSchema'; -import SentryLogger from '../sentryLogger'; +import { runCheckProjectVerificationStatus } from '../services/cronJobs/checkProjectVerificationStatus'; import { runCheckPendingDonationsCronJob } from '../services/cronJobs/syncDonationsWithNetwork'; import { runCheckPendingProjectListingCronJob } from '../services/cronJobs/syncProjectsRequiredForListing'; -import { runCheckProjectVerificationStatus } from '../services/cronJobs/checkProjectVerificationStatus'; -import { adminJsRootPath, getAdminJsRouter } from './adminJs/adminJs'; import { redis } from '../redis'; -import { logger } from '../utils/logger'; import { runNotifyMissingDonationsCronJob } from '../services/cronJobs/notifyDonationsWithSegment'; import { i18n, setI18nLocaleForRequest, translationErrorMessagesKeys, } from '../utils/errorMessages'; +import { logger } from '../utils/logger'; +import { adminJsRootPath, getAdminJsRouter } from './adminJs/adminJs'; // import { apiGivRouter } from '../routers/apiGivRoutes'; -import { authorizationHandler } from '../services/authorizationServices'; -import { - oauth2CallbacksRouter, - SOCIAL_PROFILES_PREFIX, -} from '../routers/oauth2Callbacks'; +import { AppDataSource, CronDataSource } from '../orm'; import { dropDbCronExtension, schedulePowerBoostingSnapshot, schedulePowerSnapshotsHistory, } from '../repositories/dbCronRepository'; +import { + oauth2CallbacksRouter, + SOCIAL_PROFILES_PREFIX, +} from '../routers/oauth2Callbacks'; +import { authorizationHandler } from '../services/authorizationServices'; +import { runSyncBackupServiceDonations } from '../services/cronJobs/backupDonationImportJob'; +import { scheduleCauseDistributionJob } 
from '../services/cronJobs/causeDistributionJob'; +import { runCheckActiveStatusOfQfRounds } from '../services/cronJobs/checkActiveStatusQfRounds'; +import { runCheckAndUpdateEndaomentProject } from '../services/cronJobs/checkAndUpdateEndaomentProject'; +import { runCheckQRTransactionJob } from '../services/cronJobs/checkQRTransactionJob'; +import { runCheckUserSuperTokenBalancesJob } from '../services/cronJobs/checkUserSuperTokenBalancesJob'; +import { runDraftDonationMatchWorkerJob } from '../services/cronJobs/draftDonationMatchingJob'; import { runFillPowerSnapshotBalanceCronJob } from '../services/cronJobs/fillSnapshotBalances'; -import { runUpdatePowerRoundCronJob } from '../services/cronJobs/updatePowerRoundJob'; -import { AppDataSource, CronDataSource } from '../orm'; -import { ApolloContext } from '../types/ApolloContext'; -import { ProjectResolverWorker } from '../workers/projectsResolverWorker'; +import { runGenerateSitemapOnFrontend } from '../services/cronJobs/generateSitemapOnFrontend'; +import { runSyncLostDonations } from '../services/cronJobs/importLostDonationsJob'; import { runInstantBoostingUpdateCronJob } from '../services/cronJobs/instantBoostingUpdateJob'; -import { runCheckActiveStatusOfQfRounds } from '../services/cronJobs/checkActiveStatusQfRounds'; +import { runProjectEvaluationCronJob } from '../services/cronJobs/projectEvaluationService'; +import { runCheckPendingRecurringDonationsCronJob } from '../services/cronJobs/syncRecurringDonationsWithNetwork'; +import { runCheckPendingSwapsCronJob } from '../services/cronJobs/syncSwapTransactions'; +import { runUpdatePowerRoundCronJob } from '../services/cronJobs/updatePowerRoundJob'; import { runUpdateProjectCampaignsCacheJob } from '../services/cronJobs/updateProjectCampaignsCacheJob'; -import { corsOptions, whitelistHostnames } from './cors'; -import { runSyncLostDonations } from '../services/cronJobs/importLostDonationsJob'; -import { runSyncBackupServiceDonations } from 
'../services/cronJobs/backupDonationImportJob'; import { runUpdateRecurringDonationStream } from '../services/cronJobs/updateStreamOldRecurringDonationsJob'; -import { runDraftDonationMatchWorkerJob } from '../services/cronJobs/draftDonationMatchingJob'; -import { runCheckUserSuperTokenBalancesJob } from '../services/cronJobs/checkUserSuperTokenBalancesJob'; -import { runCheckPendingRecurringDonationsCronJob } from '../services/cronJobs/syncRecurringDonationsWithNetwork'; -import { runCheckQRTransactionJob } from '../services/cronJobs/checkQRTransactionJob'; +import { refreshProjectEstimatedMatchingView } from '../services/projectViewsService'; import { addClient } from '../services/sse/sse'; +import { ApolloContext } from '../types/ApolloContext'; import { isTestEnv } from '../utils/utils'; -import { refreshProjectEstimatedMatchingView } from '../services/projectViewsService'; -import { runCheckAndUpdateEndaomentProject } from '../services/cronJobs/checkAndUpdateEndaomentProject'; -import { runGenerateSitemapOnFrontend } from '../services/cronJobs/generateSitemapOnFrontend'; -import { runCheckPendingSwapsCronJob } from '../services/cronJobs/syncSwapTransactions'; -import { runProjectEvaluationCronJob } from '../services/cronJobs/projectEvaluationService'; -import { scheduleCauseDistributionJob } from '../services/cronJobs/causeDistributionJob'; +import { ProjectResolverWorker } from '../workers/projectsResolverWorker'; +import { corsOptions, whitelistHostnames } from './cors'; Resource.validate = validate; @@ -324,7 +324,7 @@ export async function bootstrap() { logger.info('GraphQL endpoint disabled - ENABLE_GRAPHQL is set to false'); } - // AdminJs! 
+ // AdminJs app.use(adminJsRootPath, await getAdminJsRouter()); app.use(bodyParserJson); // app.use('/apigive', apiGivRouter); diff --git a/src/services/authorizationService.test.ts b/src/services/authorizationService.test.ts index f79329a79..180ad274e 100644 --- a/src/services/authorizationService.test.ts +++ b/src/services/authorizationService.test.ts @@ -1,15 +1,14 @@ -import { assert } from 'chai'; import Axios from 'axios'; +import { assert } from 'chai'; import { ethers } from 'ethers'; import { generateRandomEtheriumAddress, generateTestAccessToken, saveUserDirectlyToDb, } from '../../test/testUtils'; -import { User } from '../entities/user'; -import { authorizationHandler } from './authorizationServices'; import config from '../config'; import { findUserByWalletAddress } from '../repositories/userRepository'; +import { authorizationHandler } from './authorizationServices'; describe('authorizationHandler() test cases', authorizationHandlerTestCases); @@ -26,41 +25,41 @@ function authorizationHandlerTestCases() { const jwtUser = await authorizationHandler('1', accessToken); assert.equal(jwtUser.userId, user.id); }); - it('should decode user jwt with the auth microservice', async () => { - const privateKey = process.env.PRIVATE_ETHERS_TEST_KEY as string; - const publicKey = process.env.PUBLIC_ETHERS_TEST_KEY as string; + // it('should decode user jwt with the auth microservice', async () => { + // const privateKey = process.env.PRIVATE_ETHERS_TEST_KEY as string; + // const publicKey = process.env.PUBLIC_ETHERS_TEST_KEY as string; - const user = await saveUserDirectlyToDb(publicKey); - const nonceRoute = config.get('AUTH_MICROSERVICE_NONCE_URL') as string; - const nonceResult = await Axios.get(nonceRoute); - const wallet = new ethers.Wallet(privateKey); + // const user = await saveUserDirectlyToDb(publicKey); + // const nonceRoute = config.get('AUTH_MICROSERVICE_NONCE_URL') as string; + // const nonceResult = await Axios.get(nonceRoute); + // const wallet = 
new ethers.Wallet(privateKey); - const siweMessage = new siwe.SiweMessage({ - domain, - address: publicKey, - nonce: nonceResult.data.message, // verification servers gives - statement: 'This is a test statement.', - uri: origin, - version: '1', - chainId: '1', - }); - const textMessage = siweMessage.prepareMessage(); - const signature = await wallet.signMessage(textMessage); + // const siweMessage = new siwe.SiweMessage({ + // domain, + // address: publicKey, + // nonce: nonceResult.data.message, // verification servers gives + // statement: 'This is a test statement.', + // uri: origin, + // version: '1', + // chainId: '1', + // }); + // const textMessage = siweMessage.prepareMessage(); + // const signature = await wallet.signMessage(textMessage); - const authenticationRoute = config.get( - 'AUTH_MICROSERVICE_AUTHENTICATION_URL', - ) as string; - const authenticationResult = await Axios.post(authenticationRoute, { - message: textMessage, - nonce: nonceResult.data.message, - signature, - }); + // const authenticationRoute = config.get( + // 'AUTH_MICROSERVICE_AUTHENTICATION_URL', + // ) as string; + // const authenticationResult = await Axios.post(authenticationRoute, { + // message: textMessage, + // nonce: nonceResult.data.message, + // signature, + // }); - const accessToken = authenticationResult.data.jwt; - const jwtUser = await authorizationHandler('2', accessToken); - assert.equal(jwtUser.userId, user.id); - await User.delete(user.id); - }); + // const accessToken = authenticationResult.data.jwt; + // const jwtUser = await authorizationHandler('2', accessToken); + // assert.equal(jwtUser.userId, user.id); + // await User.delete(user.id); + // }); it('should decode jwt and create user if it is nonexistent', async () => { const privateKey = process.env.PRIVATE_ETHERS_SECONDARY_TEST_KEY as string; const publicKey = process.env.PUBLIC_ETHERS_SECONDARY_TEST_KEY as string; diff --git a/src/services/chains/evm/draftDonationService.ts 
b/src/services/chains/evm/draftDonationService.ts index c3583fb64..985c9b5ea 100644 --- a/src/services/chains/evm/draftDonationService.ts +++ b/src/services/chains/evm/draftDonationService.ts @@ -266,6 +266,7 @@ async function submitMatchedDraftDonation( projectId, referrerId, qfRoundId, + fromTokenAmount, } = draftDonation; try { @@ -291,7 +292,7 @@ async function submitMatchedDraftDonation( undefined, // useDonationBox undefined, // relevantDonationTxHash undefined, // swapData - draftDonation.fromTokenAmount, // fromTokenAmount + fromTokenAmount ?? undefined, // fromTokenAmount: preserves 0, null/undefined โ†’ undefined qfRoundId, // roundId ); diff --git a/src/services/recurringDonationService.test.ts b/src/services/recurringDonationService.test.ts index 3fa834a0f..ddfa14988 100644 --- a/src/services/recurringDonationService.test.ts +++ b/src/services/recurringDonationService.test.ts @@ -656,20 +656,26 @@ function qfSmartSelectTestCases() { project.qfRounds = [qfRound1, qfRound2]; await project.save(); - // Also create explicit ProjectQfRound relations to ensure database consistency - await ProjectQfRound.create({ - projectId: project.id, - qfRoundId: qfRound1.id, - sumDonationValueUsd: 0, - countUniqueDonors: 0, - }).save(); + // Upsert ProjectQfRound relations to ensure database consistency + await ProjectQfRound.upsert( + { + projectId: project.id, + qfRoundId: qfRound1.id, + sumDonationValueUsd: 0, + countUniqueDonors: 0, + }, + ['projectId', 'qfRoundId'], + ); - await ProjectQfRound.create({ - projectId: project.id, - qfRoundId: qfRound2.id, - sumDonationValueUsd: 0, - countUniqueDonors: 0, - }).save(); + await ProjectQfRound.upsert( + { + projectId: project.id, + qfRoundId: qfRound2.id, + sumDonationValueUsd: 0, + countUniqueDonors: 0, + }, + ['projectId', 'qfRoundId'], + ); // Create recurring donation const recurringDonation = await saveRecurringDonationDirectlyToDb({ diff --git a/src/services/sse/sse.ts b/src/services/sse/sse.ts index 
3c0cdf211..0c627a5de 100644 --- a/src/services/sse/sse.ts +++ b/src/services/sse/sse.ts @@ -1,7 +1,15 @@ import { Response } from 'express'; +import { Redis } from 'ioredis'; +import { logger } from '../../utils/logger'; +import { redisConfig } from '../../redis'; let clients: Response[] = []; +// Redis Pub/Sub for cross-instance SSE coordination +const redisSubscriber = new Redis(redisConfig); +const redisPublisher = new Redis(redisConfig); +const SSE_CHANNEL = 'sse:notifications'; + type TNewDonation = { type: 'new-donation'; data: { @@ -18,6 +26,51 @@ type TDraftDonationFailed = { }; }; +// Subscribe to Redis notifications and forward to connected clients +redisSubscriber.subscribe(SSE_CHANNEL, (err, count) => { + if (err) { + logger.error('SSE: Failed to subscribe to Redis channel', { error: err }); + } else { + logger.debug('SSE: Subscribed to Redis channel', { + channel: SSE_CHANNEL, + subscriptionCount: count, + }); + } +}); + +redisSubscriber.on('message', (channel, message) => { + if (channel === SSE_CHANNEL) { + logger.debug('SSE: Received message from Redis', { + totalLocalClients: clients.length, + message, + }); + + // Broadcast to all clients connected to THIS instance + // Filter out dead clients on write failure + clients = clients.filter(client => { + try { + client.write(`data: ${message}\n\n`); + return true; + } catch (error) { + logger.error('SSE: Error writing to client, removing from list', { + error, + }); + return false; + } + }); + } +}); + +// Helper function to publish messages to Redis +function publishToRedis(message: string, context: string) { + redisPublisher + .publish(SSE_CHANNEL, message) + .then(() => logger.debug(`SSE: ${context} published successfully`)) + .catch(error => + logger.error(`SSE: Failed to publish ${context}`, { error }), + ); +} + // Add a new client to the SSE stream export function addClient(res: Response) { res.setHeader('Content-Type', 'text/event-stream'); @@ -28,6 +81,7 @@ export function addClient(res: 
Response) { res.flushHeaders(); clients.push(res); + logger.debug('SSE: New client connected', { totalClients: clients.length }); // Send a welcome message to the newly connected client const data = { @@ -39,16 +93,35 @@ export function addClient(res: Response) { // Remove the client on disconnect res.on('close', () => { clients = clients.filter(client => client !== res); + logger.debug('SSE: Client disconnected', { totalClients: clients.length }); res.end(); }); } // Notify all connected clients about a new donation +// This publishes to Redis, which then broadcasts to ALL instances export function notifyClients(data: TNewDonation) { - clients.forEach(client => client.write(`data: ${JSON.stringify(data)}\n\n`)); + const message = JSON.stringify(data); + + logger.debug('SSE: notifyClients called - publishing to Redis', { + totalLocalClients: clients.length, + data, + }); + + // Publish to Redis using shared connection - this will be received by ALL instances (including this one) + publishToRedis(message, 'new-donation'); } // Notify all connected clients about a failed donation +// This publishes to Redis, which then broadcasts to ALL instances export function notifyDonationFailed(data: TDraftDonationFailed) { - clients.forEach(client => client.write(`data: ${JSON.stringify(data)}\n\n`)); + const message = JSON.stringify(data); + + logger.debug('SSE: notifyDonationFailed called - publishing to Redis', { + totalLocalClients: clients.length, + data, + }); + + // Publish to Redis using shared connection - this will be received by ALL instances (including this one) + publishToRedis(message, 'draft-donation-failed'); } diff --git a/src/services/v6CoreUserSync.ts b/src/services/v6CoreUserSync.ts new file mode 100644 index 000000000..eed528bfd --- /dev/null +++ b/src/services/v6CoreUserSync.ts @@ -0,0 +1,56 @@ +import axios from 'axios'; +import { User } from '../entities/user'; +import { logger } from '../utils/logger'; + +/** + * Webhook sync: send the newly created 
Impact-Graph user to v6-core. + * + * Env vars: + * - V6_CORE_USER_SYNC_URL: full URL to the v6-core GraphQL endpoint (typically ends with /graphql) + * - V6_CORE_USER_SYNC_PASSWORD: shared secret sent in header + * - V6_CORE_USER_SYNC_PASSWORD_HEADER (optional): header name, default 'x-impact-graph-password' + */ +export async function syncNewImpactGraphUserToV6Core( + user: User, +): Promise { + const url = process.env.V6_CORE_USER_SYNC_URL; + const password = process.env.V6_CORE_USER_SYNC_PASSWORD; + const headerName = + process.env.V6_CORE_USER_SYNC_PASSWORD_HEADER || 'x-impact-graph-password'; + + if (!url || !password) { + logger.warn( + 'syncNewImpactGraphUserToV6Core() skipped: missing V6_CORE_USER_SYNC_URL or V6_CORE_USER_SYNC_PASSWORD', + ); + return; + } + + // Send only the minimal payload needed by v6-core. + const input = { + id: user.id, + walletAddress: user.walletAddress, + }; + + const query = /* GraphQL */ ` + mutation ImpactGraphUpsertUser($input: ImpactGraphUserWebhookInput!) 
{ + impactGraphUpsertUser(input: $input) { + id + } + } + `; + + await axios.post( + url, + { + query, + variables: { input }, + }, + { + headers: { + 'Content-Type': 'application/json', + [headerName]: password, + }, + timeout: Number(process.env.V6_CORE_USER_SYNC_TIMEOUT_MS || 10_000), + }, + ); +} diff --git a/src/utils/user.ts b/src/utils/user.ts index 6c9432d15..345591732 100644 --- a/src/utils/user.ts +++ b/src/utils/user.ts @@ -1,7 +1,7 @@ import axios from 'axios'; import { logger } from '../utils/logger'; -const validateEmailWithRegex = (email: string): boolean => { +export const validateEmailWithRegex = (email: string): boolean => { const emailRegex = /^[^\s@]+@[^\s@]+\.[^\s@]+$/; return emailRegex.test(email); }; diff --git a/test/graphqlQueries.ts b/test/graphqlQueries.ts index fc58f0f28..49f35d3eb 100644 --- a/test/graphqlQueries.ts +++ b/test/graphqlQueries.ts @@ -1571,6 +1571,8 @@ export const updateUser = ` $lastName: String $firstName: String $avatar: String + $twitterName: String + $telegramName: String $newUser: Boolean ) { updateUser( @@ -1580,6 +1582,8 @@ export const updateUser = ` firstName: $firstName lastName: $lastName avatar: $avatar + twitterName: $twitterName + telegramName: $telegramName newUser: $newUser ) } diff --git a/tsconfig.json b/tsconfig.json index 4128761a7..5e15cbec8 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -10,14 +10,14 @@ "esnext.asynciterable", "dom" ], - "allowJs": true, /* Allow javascript files to be compiled. */ - "checkJs": true, /* Report errors in .js files. */ - "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ + "allowJs": true, /* Allow javascript files to be compiled. */ + "checkJs": false, /* Report errors in .js files. */ + "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ "declaration": true /* Generates corresponding '.d.ts' file. */, "sourceMap": true /* Generates corresponding '.map' file. 
*/, // "outFile": "./", /* Concatenate and emit output to single file. */ "outDir": "./build" /* Redirect output structure to the directory. */, - "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ + "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ "removeComments": false /* Do not emit comments to output. */, // "noEmit": true, /* Do not emit outputs. */ "importHelpers": true /* Import emit helpers from 'tslib'. */, @@ -45,7 +45,7 @@ // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ // "typeRoots": [], /* List of folders to include type definitions from. */ // "types": [], /* Type declaration files to be included in compilation. */ -// "allowSyntheticDefaultImports": false /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */, + // "allowSyntheticDefaultImports": false /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */, "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ "forceConsistentCasingInFileNames": true, @@ -59,7 +59,9 @@ "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */, "emitDecoratorMetadata": true /* Enables experimental support for emitting type metadata for decorators. */ }, - "exclude": ["./build", "node_modules", + "exclude": [ + "./build", + "node_modules", "src/server/adminJs/tabs/components" ] -} +} \ No newline at end of file