62 changes: 62 additions & 0 deletions .circleci/config.yml
@@ -142,6 +142,66 @@ jobs:
      - run:
          name: Stop React App
          command: pkill -f npm
  jest-tests:
    docker:
      - image: cimg/node:20.13
    resource_class: medium
    steps:
      - checkout
      - restore_cache:
          key: app-{{ checksum "app/package-lock.json" }}
      - run:
          name: Install Dependencies
          command: cd app && npm install --legacy-peer-deps
      - save_cache:
          key: app-{{ checksum "app/package-lock.json" }}
          paths:
            - ./app/node_modules
      - run:
          name: Run Jest Tests
          command: cd app && CI=true npm test -- --watchAll=false --coverage --coverageReporters=text --coverageReporters=lcov
      - store_artifacts:
          path: app/coverage
          destination: coverage-report
  cypress-e2e-tests:
    docker:
      - image: cimg/node:20.13
    resource_class: large
    steps:
      - checkout
      - restore_cache:
          key: app-{{ checksum "app/package-lock.json" }}
      - run:
          name: Install Dependencies
          command: cd app && npm install --legacy-peer-deps
      - save_cache:
          key: app-{{ checksum "app/package-lock.json" }}
          paths:
            - ./app/node_modules
      - run:
          name: Start React App in Background
          command: cd app && DISABLE_ESLINT_PLUGIN=true npm start
          background: true
      - run:
          name: Wait for React App
          command: |
            for i in {1..60}; do
              if curl -s http://localhost:3000 > /dev/null; then
                echo "React app is ready!"
                exit 0
              fi
              echo "Waiting for React app... ($i/60)"
              sleep 2
            done
            echo "React app failed to start"
            exit 1
      - run:
          name: Run Cypress Tests
          command: cd app && CI=true npx cypress run
      - store_artifacts:
          path: app/cypress/videos
      - store_artifacts:
          path: app/cypress/screenshots

workflows:
  wrolpi-api-tests:
@@ -153,3 +213,5 @@ workflows:
  wrolpi-app-test:
    jobs:
      - react-app-start
      - jest-tests
      - cypress-e2e-tests
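
Note: the "Wait for React App" step polls http://localhost:3000 for up to 60 attempts, two seconds apart, and fails the job if the dev server never answers. For reference, a minimal Python sketch of the same readiness check, usable outside CI (the URL, attempt count, and delay mirror the config above; the requests dependency is an assumption, not something this PR installs):

import sys
import time

import requests  # assumed available; not a dependency this PR adds


def wait_for_app(url='http://localhost:3000', attempts=60, delay=2.0):
    """Poll `url` until it answers, mirroring the CI wait loop."""
    for i in range(1, attempts + 1):
        try:
            requests.get(url, timeout=5)  # any response counts, like `curl -s`
            print('React app is ready!')
            return True
        except requests.RequestException:
            print(f'Waiting for React app... ({i}/{attempts})')
            time.sleep(delay)
    print('React app failed to start')
    return False


if __name__ == '__main__':
    sys.exit(0 if wait_for_app() else 1)
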
3 changes: 2 additions & 1 deletion .gitignore
@@ -125,7 +125,7 @@ mapnik.xml
docker-compose.override.yml

# test directory is used as media directory, we don't want to commit what a user downloads.
test
/test
pg_data

# Directories used to build images
@@ -135,3 +135,4 @@ pg_data
/pi-gen/*xz

.DS_Store
app/cypress/screenshots
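
The leading slash anchors the pattern: `/test` ignores only the repository-root test directory (the media directory mentioned in the comment), while the old `test` pattern also ignored any nested directory named test. A quick illustration of the difference using the third-party pathspec library (chosen here for demonstration only; git itself implements this matching):

import pathspec  # pip install pathspec; gitwildmatch implementation

anchored = pathspec.PathSpec.from_lines('gitwildmatch', ['/test'])
unanchored = pathspec.PathSpec.from_lines('gitwildmatch', ['test'])

print(anchored.match_file('test/video.mp4'))      # True: root-level test/ is ignored
print(anchored.match_file('app/test/file.js'))    # False: nested test/ is now kept
print(unanchored.match_file('app/test/file.js'))  # True: the old pattern ignored it
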
85 changes: 85 additions & 0 deletions alembic/versions/66407d145b76_.py
@@ -0,0 +1,85 @@
"""Create collection table for domain collections

Revision ID: 66407d145b76
Revises: 4f03b9548f6e
Create Date: 2025-10-26 10:57:16.462524

"""
import os
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import Session


# revision identifiers, used by Alembic.
revision = '66407d145b76'
down_revision = '4f03b9548f6e'
branch_labels = None
depends_on = None

DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False


def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)

    # Step 1: Create new collection table (keeping channel table separate for now)
    op.create_table(
        'collection',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('name', sa.Text(), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('kind', sa.String(), nullable=False),
        sa.Column('directory', sa.Text(), nullable=True),
        sa.Column('tag_id', sa.Integer(), nullable=True),
        sa.Column('created_date', sa.DateTime(), server_default=sa.func.now(), nullable=False),
        sa.Column('item_count', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('total_size', sa.BigInteger(), nullable=False, server_default='0'),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], name='collection_tag_id_fkey'),
        sa.UniqueConstraint('directory', name='uq_collection_directory')
    )

    # Create indexes for collection
    op.create_index('idx_collection_kind', 'collection', ['kind'], unique=False)
    op.create_index('idx_collection_item_count', 'collection', ['item_count'], unique=False)
    op.create_index('idx_collection_total_size', 'collection', ['total_size'], unique=False)

    # Step 2: Create collection_item junction table
    op.create_table(
        'collection_item',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('collection_id', sa.Integer(), nullable=False),
        sa.Column('file_group_id', sa.Integer(), nullable=False),
        sa.Column('position', sa.Integer(), nullable=False, server_default='0'),
        sa.Column('added_date', sa.DateTime(), server_default=sa.func.now(), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.ForeignKeyConstraint(['collection_id'], ['collection.id'], ondelete='CASCADE'),
        sa.ForeignKeyConstraint(['file_group_id'], ['file_group.id'], ondelete='CASCADE'),
        sa.UniqueConstraint('collection_id', 'file_group_id', name='uq_collection_file_group')
    )

    # Create indexes for collection_item
    op.create_index('idx_collection_item_collection_id', 'collection_item', ['collection_id'], unique=False)
    op.create_index('idx_collection_item_file_group_id', 'collection_item', ['file_group_id'], unique=False)
    op.create_index('idx_collection_item_position', 'collection_item', ['position'], unique=False)

    # Ensure table owner in non-docker environments
    if not DOCKERIZED:
        session.execute(sa.text('ALTER TABLE public.collection OWNER TO wrolpi'))
        session.execute(sa.text('ALTER TABLE public.collection_item OWNER TO wrolpi'))


def downgrade():
    # Drop collection_item table and its indexes
    op.drop_index('idx_collection_item_position', table_name='collection_item')
    op.drop_index('idx_collection_item_file_group_id', table_name='collection_item')
    op.drop_index('idx_collection_item_collection_id', table_name='collection_item')
    op.drop_table('collection_item')

    # Drop collection table and its indexes
    op.drop_index('idx_collection_total_size', table_name='collection')
    op.drop_index('idx_collection_item_count', table_name='collection')
    op.drop_index('idx_collection_kind', table_name='collection')
    op.drop_table('collection')
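
For orientation, here is a minimal sketch of the SQLAlchemy declarative models this schema implies. Class and attribute names are assumptions for illustration; the real definitions live in wrolpi.collections and may be organized differently:

# Illustrative only: a declarative mapping matching the tables above.
from sqlalchemy import (BigInteger, Column, DateTime, ForeignKey, Integer,
                        String, Text, func)
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Collection(Base):
    __tablename__ = 'collection'
    id = Column(BigInteger, primary_key=True)
    name = Column(Text)
    description = Column(Text)
    kind = Column(String, nullable=False)  # e.g. 'channel', 'domain'
    directory = Column(Text, unique=True)
    tag_id = Column(Integer, ForeignKey('tag.id'))
    created_date = Column(DateTime, server_default=func.now(), nullable=False)
    item_count = Column(Integer, nullable=False, server_default='0')
    total_size = Column(BigInteger, nullable=False, server_default='0')
    items = relationship('CollectionItem', back_populates='collection')


class CollectionItem(Base):
    __tablename__ = 'collection_item'
    id = Column(Integer, primary_key=True)
    collection_id = Column(Integer, ForeignKey('collection.id', ondelete='CASCADE'), nullable=False)
    file_group_id = Column(Integer, ForeignKey('file_group.id', ondelete='CASCADE'), nullable=False)
    position = Column(Integer, nullable=False, server_default='0')
    added_date = Column(DateTime, server_default=func.now(), nullable=False)
    collection = relationship('Collection', back_populates='items')
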
73 changes: 73 additions & 0 deletions alembic/versions/b43f70f369d0_remove_duplicate_channel_fields_.py
@@ -0,0 +1,73 @@
"""Remove duplicate Channel fields delegated to Collection

This migration removes fields from Channel that now delegate to Collection:
- name (now property: channel.name → collection.name)
- directory (now property: channel.directory → collection.directory)
- tag_id (now property: channel.tag_id → collection.tag_id)

These fields were already synced to Collection in a previous migration.

Revision ID: b43f70f369d0
Revises: ba98bd360b7a
Create Date: 2025-11-19 21:48:58.488850

"""
import os
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = 'b43f70f369d0'
down_revision = 'ba98bd360b7a'
branch_labels = None
depends_on = None

DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False


def upgrade():
    print("\n" + "="*60)
    print("Removing Duplicate Fields from Channel")
    print("="*60 + "\n")

    # Step 1: Drop foreign key constraint on tag_id
    print("Step 1: Dropping tag_id foreign key constraint...")
    op.drop_constraint('channel_tag_id_fkey', 'channel', type_='foreignkey')
    print("✓ Dropped foreign key constraint\n")

    # Step 2: Drop columns (data already in Collection)
    print("Step 2: Dropping duplicate columns...")
    op.drop_column('channel', 'name')
    print("  ✓ Dropped channel.name")
    op.drop_column('channel', 'directory')
    print("  ✓ Dropped channel.directory")
    op.drop_column('channel', 'tag_id')
    print("  ✓ Dropped channel.tag_id\n")

    print("="*60)
    print("✓ Channel Cleanup Complete")
    print("  Channels now delegate name/directory/tag to Collection")
    print("="*60 + "\n")


def downgrade():
    # Re-add the columns
    op.add_column('channel', sa.Column('name', sa.String(), nullable=True))
    op.add_column('channel', sa.Column('directory', sa.String(), nullable=True))
    op.add_column('channel', sa.Column('tag_id', sa.Integer(), nullable=True))

    # Re-add foreign key
    op.create_foreign_key('channel_tag_id_fkey', 'channel', 'tag', ['tag_id'], ['id'], ondelete='CASCADE')

    # Restore data from Collection
    bind = op.get_bind()
    bind.execute(text("""
        UPDATE channel
        SET name = collection.name,
            directory = collection.directory,
            tag_id = collection.tag_id
        FROM collection
        WHERE channel.collection_id = collection.id
    """))
130 changes: 130 additions & 0 deletions alembic/versions/ba98bd360b7a_add_collection_id_to_channel_model.py
@@ -0,0 +1,130 @@
"""Add collection_id to Channel model

This migration:
1. Adds collection_id column to channel table
2. Creates Collection records for existing Channels
3. Links Channels to their Collections
4. Adds foreign key constraint

Revision ID: ba98bd360b7a
Revises: migrate_domains_to_collections
Create Date: 2025-11-19 21:10:56.472836

"""
import os
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
from sqlalchemy.orm import Session

# revision identifiers, used by Alembic.
revision = 'ba98bd360b7a'
down_revision = 'migrate_domains_to_collections'
branch_labels = None
depends_on = None

DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False


def upgrade():
    bind = op.get_bind()
    session = Session(bind=bind)

    print("\n" + "="*60)
    print("Channel → Collection Migration")
    print("="*60 + "\n")

    # Step 1: Add collection_id column to channel table (nullable initially)
    print("Step 1: Adding collection_id column to channel table...")
    op.add_column('channel', sa.Column('collection_id', sa.Integer(), nullable=True))
    print("✓ Added collection_id column\n")

    # Step 2: Create Collection records for each Channel
    print("Step 2: Creating Collection records for existing Channels...")

    # Fetch all channels
    result = session.execute(text("""
        SELECT id, name, directory, tag_id
        FROM channel
        ORDER BY name
    """))

    channels = []
    for row in result:
        channels.append({
            'id': row[0],
            'name': row[1],
            'directory': row[2],
            'tag_id': row[3],
        })

    print(f"Found {len(channels)} Channel records to migrate")

    if channels:
        from wrolpi.collections import Collection

        for channel in channels:
            channel_id = channel['id']
            channel_name = channel['name']

            print(f"  Processing Channel {channel_id}: {channel_name}")

            # Check if Collection already exists
            existing = session.query(Collection).filter_by(
                name=channel_name,
                kind='channel'
            ).first()

            if existing:
                print(f"    Collection already exists (id={existing.id})")
                collection_id = existing.id
            else:
                # Create Collection
                collection = Collection(
                    name=channel_name,
                    kind='channel',
                    directory=channel['directory'],  # Channels always have directory
                    tag_id=channel['tag_id'],
                )
                session.add(collection)
                session.flush([collection])
                collection_id = collection.id
                print(f"    Created Collection id={collection_id}")

            # Link Channel to Collection
            session.execute(
                text("UPDATE channel SET collection_id = :collection_id WHERE id = :channel_id"),
                {'collection_id': collection_id, 'channel_id': channel_id}
            )

        session.commit()
        print("✓ Created Collections and linked Channels\n")
    else:
        print("No channels to migrate\n")

    # Step 3: Add foreign key constraint
    print("Step 3: Adding foreign key constraint...")
    op.create_foreign_key(
        'fk_channel_collection_id',
        'channel',
        'collection',
        ['collection_id'],
        ['id'],
        ondelete='CASCADE'
    )
    print("✓ Added foreign key constraint\n")

    print("="*60)
    print("✓ Channel → Collection Migration Complete")
    print("="*60 + "\n")

    if not DOCKERIZED:
        session.execute(text('ALTER TABLE public.channel OWNER TO wrolpi'))


def downgrade():
    # Remove foreign key constraint
    op.drop_constraint('fk_channel_collection_id', 'channel', type_='foreignkey')

    # Drop collection_id column
    op.drop_column('channel', 'collection_id')
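
After this migration runs, every channel row should carry a collection_id and every channel should have a matching Collection of kind 'channel'. A hedged post-migration check, as a sketch (table and column names come from the migration above; the connection URL is an assumption to adjust for your environment):

# Verification sketch: confirm no channel was left unlinked.
import sqlalchemy as sa

engine = sa.create_engine('postgresql://wrolpi@127.0.0.1:5432/wrolpi')  # assumed URL
with engine.connect() as conn:
    orphans = conn.execute(sa.text(
        'SELECT id FROM channel WHERE collection_id IS NULL'
    )).fetchall()
    linked = conn.execute(sa.text(
        "SELECT COUNT(*) FROM collection WHERE kind = 'channel'"
    )).scalar()
    print(f'Channel collections: {linked}, unlinked channels: {len(orphans)}')
    assert not orphans, f'Channels missing a Collection: {orphans}'
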