Commit 3964553

Creating a new Collection model for organizing FileGroups.
Replacing Channels and Domains with Collection. Moving download relationships from Channel to Collection. Creating a domains.yaml config to manage tagged domain collections. Adding a turn-key upgrade solution in the UI.
Parent: f988c8e

115 files changed: +13,306 −983 lines

Some content is hidden: large commits have some content hidden by default.
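
Before the file-by-file diff, here is a minimal sketch of the data model this commit introduces, reconstructed from the collection and collection_item tables created in the first Alembic migration below. Only the table and column definitions come from this commit; the class names, the relationship, and the example kind values are assumptions for illustration:

# Hypothetical reconstruction from the migration schema below; everything
# other than the table/column names and constraints is assumed.
from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text, func
from sqlalchemy.orm import declarative_base, relationship

Base = declarative_base()


class Collection(Base):
    __tablename__ = 'collection'

    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False)
    description = Column(Text)
    kind = Column(String, nullable=False)  # assumed values: 'channel', 'domain', ...
    directory = Column(Text, unique=True)
    tag_id = Column(Integer, ForeignKey('tag.id'))
    created_date = Column(DateTime, server_default=func.now(), nullable=False)
    item_count = Column(Integer, nullable=False, server_default='0')
    total_size = Column(Integer, nullable=False, server_default='0')

    items = relationship('CollectionItem', cascade='all, delete-orphan')


class CollectionItem(Base):
    """Junction row linking one FileGroup into one Collection."""
    __tablename__ = 'collection_item'

    id = Column(Integer, primary_key=True)
    collection_id = Column(Integer, ForeignKey('collection.id', ondelete='CASCADE'), nullable=False)
    file_group_id = Column(Integer, ForeignKey('file_group.id', ondelete='CASCADE'), nullable=False)
    position = Column(Integer, nullable=False, server_default='0')  # ordering within the collection
    added_date = Column(DateTime, server_default=func.now(), nullable=False)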

.circleci/config.yml

Lines changed: 62 additions & 0 deletions
@@ -142,6 +142,66 @@ jobs:
       - run:
           name: Stop React App
           command: pkill -f npm
+  jest-tests:
+    docker:
+      - image: cimg/node:20.13
+    resource_class: medium
+    steps:
+      - checkout
+      - restore_cache:
+          key: app-{{ checksum "app/package-lock.json" }}
+      - run:
+          name: Install Dependencies
+          command: cd app && npm install --legacy-peer-deps
+      - save_cache:
+          key: app-{{ checksum "app/package-lock.json" }}
+          paths:
+            - ./app/node_modules
+      - run:
+          name: Run Jest Tests
+          command: cd app && CI=true npm test -- --watchAll=false --coverage --coverageReporters=text --coverageReporters=lcov
+      - store_artifacts:
+          path: app/coverage
+          destination: coverage-report
+  cypress-e2e-tests:
+    docker:
+      - image: cimg/node:20.18-browsers
+    resource_class: large
+    steps:
+      - checkout
+      - restore_cache:
+          key: app-browsers-{{ checksum "app/package-lock.json" }}
+      - run:
+          name: Install Dependencies
+          command: cd app && npm install --legacy-peer-deps
+      - save_cache:
+          key: app-browsers-{{ checksum "app/package-lock.json" }}
+          paths:
+            - ./app/node_modules
+      - run:
+          name: Start React App in Background
+          command: cd app && DISABLE_ESLINT_PLUGIN=true npm start
+          background: true
+      - run:
+          name: Wait for React App
+          command: |
+            for i in {1..60}; do
+              if curl -s http://localhost:3000 > /dev/null; then
+                echo "React app is ready!"
+                exit 0
+              fi
+              echo "Waiting for React app... ($i/60)"
+              sleep 2
+            done
+            echo "React app failed to start"
+            exit 1
+      - run:
+          name: Run Cypress Tests
+          command: cd app && CI=true npx cypress run
+      - store_artifacts:
+          path: app/cypress/videos
+      - store_artifacts:
+          path: app/cypress/screenshots
 
 workflows:
   wrolpi-api-tests:
@@ -153,3 +213,5 @@ workflows:
   wrolpi-app-test:
     jobs:
       - react-app-start
+      - jest-tests
+      - cypress-e2e-tests

.gitignore

Lines changed: 3 additions & 1 deletion
@@ -125,7 +125,7 @@ mapnik.xml
 docker-compose.override.yml
 
 # test directory is used as media directory, we don't want to commit what a user downloads.
-test
+/test
 pg_data
 
 # Directories used to build images
@@ -135,3 +135,5 @@ pg_data
 /pi-gen/*xz
 
 .DS_Store
+app/cypress/screenshots
+app/cypress/videos

.mcp.json

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+{
+  "mcpServers": {
+    "postgres": {
+      "command": "npx",
+      "args": [
+        "-y",
+        "@modelcontextprotocol/server-postgres",
+        "postgresql://postgres:wrolpi@localhost:5432/wrolpi"
+      ]
+    }
+  }
+}
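
For context: .mcp.json is a project-scoped Model Context Protocol configuration, the format read by Claude Code and other MCP clients. This entry launches the @modelcontextprotocol/server-postgres reference server via npx and points it at the local wrolpi database, presumably so an AI coding assistant can inspect the schema and data during development.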

alembic/versions/66407d145b76_.py

Lines changed: 87 additions & 0 deletions
@@ -0,0 +1,87 @@
+"""Create collection table for domain collections
+
+Revision ID: 66407d145b76
+Revises: 4f03b9548f6e
+Create Date: 2025-10-26 10:57:16.462524
+
+"""
+import os
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.orm import Session
+
+
+# revision identifiers, used by Alembic.
+revision = '66407d145b76'
+down_revision = '4f03b9548f6e'
+branch_labels = None
+depends_on = None
+
+DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False
+
+
+def upgrade():
+    bind = op.get_bind()
+    session = Session(bind=bind)
+
+    # Step 1: Create new collection table (keeping channel table separate for now)
+    op.create_table(
+        'collection',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('name', sa.String(), nullable=False),
+        sa.Column('description', sa.Text(), nullable=True),
+        sa.Column('kind', sa.String(), nullable=False),
+        sa.Column('directory', sa.Text(), nullable=True),
+        sa.Column('tag_id', sa.Integer(), nullable=True),
+        sa.Column('created_date', sa.DateTime(), server_default=sa.func.now(), nullable=False),
+        sa.Column('item_count', sa.Integer(), nullable=False, server_default='0'),
+        sa.Column('total_size', sa.Integer(), nullable=False, server_default='0'),
+        sa.PrimaryKeyConstraint('id'),
+        sa.ForeignKeyConstraint(['tag_id'], ['tag.id'], name='collection_tag_id_fkey'),
+        sa.UniqueConstraint('directory', name='uq_collection_directory')
+    )
+
+    # Create indexes for collection
+    op.create_index('idx_collection_kind', 'collection', ['kind'], unique=False)
+    op.create_index('idx_collection_item_count', 'collection', ['item_count'], unique=False)
+    op.create_index('idx_collection_total_size', 'collection', ['total_size'], unique=False)
+
+    # Step 2: Create collection_item junction table
+    op.create_table(
+        'collection_item',
+        sa.Column('id', sa.Integer(), nullable=False),
+        sa.Column('collection_id', sa.Integer(), nullable=False),
+        sa.Column('file_group_id', sa.Integer(), nullable=False),
+        sa.Column('position', sa.Integer(), nullable=False, server_default='0'),
+        sa.Column('added_date', sa.DateTime(), server_default=sa.func.now(), nullable=False),
+        sa.PrimaryKeyConstraint('id'),
+        sa.ForeignKeyConstraint(['collection_id'], ['collection.id'], ondelete='CASCADE'),
+        sa.ForeignKeyConstraint(['file_group_id'], ['file_group.id'], ondelete='CASCADE'),
+        sa.UniqueConstraint('collection_id', 'file_group_id', name='uq_collection_file_group')
+    )
+
+    # Create indexes for collection_item
+    op.create_index('idx_collection_item_collection_id', 'collection_item', ['collection_id'], unique=False)
+    op.create_index('idx_collection_item_file_group_id', 'collection_item', ['file_group_id'], unique=False)
+    op.create_index('idx_collection_item_position', 'collection_item', ['position'], unique=False)
+    op.create_index('idx_collection_item_collection_position', 'collection_item', ['collection_id', 'position'], unique=False)
+
+    # Ensure table owner in non-docker environments
+    if not DOCKERIZED:
+        session.execute(sa.text('ALTER TABLE public.collection OWNER TO wrolpi'))
+        session.execute(sa.text('ALTER TABLE public.collection_item OWNER TO wrolpi'))
+
+
+def downgrade():
+    # Drop collection_item table and its indexes
+    op.drop_index('idx_collection_item_collection_position', table_name='collection_item')
+    op.drop_index('idx_collection_item_position', table_name='collection_item')
+    op.drop_index('idx_collection_item_file_group_id', table_name='collection_item')
+    op.drop_index('idx_collection_item_collection_id', table_name='collection_item')
+    op.drop_table('collection_item')
+
+    # Drop collection table and its indexes
+    op.drop_index('idx_collection_total_size', table_name='collection')
+    op.drop_index('idx_collection_item_count', table_name='collection')
+    op.drop_index('idx_collection_kind', table_name='collection')
+    op.drop_table('collection')
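
A usage sketch for the junction table this migration creates, assuming the hypothetical Collection/CollectionItem models sketched near the top of this page; add_file_group_to_collection is an illustrative helper, not a function from this commit:

from sqlalchemy import func
from sqlalchemy.exc import IntegrityError


def add_file_group_to_collection(session, collection_id: int, file_group_id: int):
    """Append a FileGroup at the end of a Collection's ordering."""
    # Next position = max(position) + 1, or 0 for an empty collection.
    next_position = session.query(
        func.coalesce(func.max(CollectionItem.position) + 1, 0)
    ).filter(CollectionItem.collection_id == collection_id).scalar()

    session.add(CollectionItem(
        collection_id=collection_id,
        file_group_id=file_group_id,
        position=next_position,
    ))
    try:
        session.commit()
    except IntegrityError:
        # uq_collection_file_group: this FileGroup is already in the collection.
        session.rollback()

Note that because both foreign keys are declared with ondelete='CASCADE', deleting a Collection or a FileGroup removes the junction rows automatically.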
Lines changed: 95 additions & 0 deletions
@@ -0,0 +1,95 @@
+"""Add unique constraint on collection (name, kind)
+
+This migration:
+1. Removes duplicate collections (keeping the first created, i.e. the lowest id)
+2. Adds a unique constraint on (name, kind) to prevent future duplicates
+
+Revision ID: add_unique_collection_name_kind
+Revises: migrate_download_to_collection
+Create Date: 2025-11-27
+"""
+import os
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import text
+from sqlalchemy.orm import Session
+
+# revision identifiers, used by Alembic.
+revision = 'add_unique_collection_name_kind'
+down_revision = 'migrate_download_to_collection'
+branch_labels = None
+depends_on = None
+
+DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False
+
+
+def upgrade():
+    bind = op.get_bind()
+    session = Session(bind=bind)
+
+    print("\n" + "=" * 60)
+    print("Add Unique Constraint on Collection (name, kind)")
+    print("=" * 60 + "\n")
+
+    # Step 1: Find and remove duplicate collections
+    print("Step 1: Finding duplicate collections...")
+
+    # Find duplicates - keep the one with the lowest id (first created)
+    duplicates = session.execute(text("""
+        SELECT name, kind, array_agg(id ORDER BY id) as ids
+        FROM collection
+        GROUP BY name, kind
+        HAVING COUNT(*) > 1
+    """)).fetchall()
+
+    if duplicates:
+        print(f"Found {len(duplicates)} sets of duplicate collections")
+        for name, kind, ids in duplicates:
+            keep_id = ids[0]  # Keep the first one (lowest id)
+            remove_ids = ids[1:]  # Remove the rest
+            print(f"  - '{name}' ({kind}): keeping id={keep_id}, removing ids={remove_ids}")
+
+            # Move any archives from duplicate collections to the one we're keeping
+            for remove_id in remove_ids:
+                session.execute(text("""
+                    UPDATE archive SET collection_id = :keep_id
+                    WHERE collection_id = :remove_id
+                """), {'keep_id': keep_id, 'remove_id': remove_id})
+
+                # Move any downloads from duplicate collections
+                session.execute(text("""
+                    UPDATE download SET collection_id = :keep_id
+                    WHERE collection_id = :remove_id
+                """), {'keep_id': keep_id, 'remove_id': remove_id})
+
+                # Move any collection items
+                session.execute(text("""
+                    UPDATE collection_item SET collection_id = :keep_id
+                    WHERE collection_id = :remove_id
+                """), {'keep_id': keep_id, 'remove_id': remove_id})
+
+                # Delete the duplicate collection
+                session.execute(text("""
+                    DELETE FROM collection WHERE id = :remove_id
+                """), {'remove_id': remove_id})
+
+        session.commit()
+        print("Duplicates removed\n")
+    else:
+        print("No duplicate collections found\n")
+
+    # Step 2: Add unique constraint
+    print("Step 2: Adding unique constraint on (name, kind)...")
+    op.create_unique_constraint('uq_collection_name_kind', 'collection', ['name', 'kind'])
+    print("Done\n")
+
+    print("=" * 60)
+    print("Migration Complete")
+    print("=" * 60 + "\n")
+
+    if not DOCKERIZED:
+        session.execute(text('ALTER TABLE public.collection OWNER TO wrolpi'))
+
+
+def downgrade():
+    op.drop_constraint('uq_collection_name_kind', 'collection', type_='unique')
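
A quick way to confirm the migration's invariant afterwards is to re-run its duplicate-finding query and expect zero rows. A minimal verification sketch, assuming the DSN that appears in .mcp.json above:

from sqlalchemy import create_engine, text

engine = create_engine('postgresql://postgres:wrolpi@localhost:5432/wrolpi')
with engine.connect() as conn:
    # Same GROUP BY/HAVING the migration uses to detect duplicates.
    rows = conn.execute(text("""
        SELECT name, kind, COUNT(*) AS n
        FROM collection
        GROUP BY name, kind
        HAVING COUNT(*) > 1
    """)).fetchall()

assert not rows, f'duplicate collections remain: {rows}'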
Lines changed: 73 additions & 0 deletions
@@ -0,0 +1,73 @@
+"""Remove duplicate Channel fields delegated to Collection
+
+This migration removes fields from Channel that now delegate to Collection:
+- name (now a property: channel.name → collection.name)
+- directory (now a property: channel.directory → collection.directory)
+- tag_id (now a property: channel.tag_id → collection.tag_id)
+
+These fields were already synced to Collection in a previous migration.
+
+Revision ID: b43f70f369d0
+Revises: ba98bd360b7a
+Create Date: 2025-11-19 21:48:58.488850
+
+"""
+import os
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import text
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = 'b43f70f369d0'
+down_revision = 'ba98bd360b7a'
+branch_labels = None
+depends_on = None
+
+DOCKERIZED = True if os.environ.get('DOCKER', '').lower().startswith('t') else False
+
+
+def upgrade():
+    print("\n" + "=" * 60)
+    print("Removing Duplicate Fields from Channel")
+    print("=" * 60 + "\n")
+
+    # Step 1: Drop foreign key constraint on tag_id
+    print("Step 1: Dropping tag_id foreign key constraint...")
+    op.drop_constraint('channel_tag_id_fkey', 'channel', type_='foreignkey')
+    print("✓ Dropped foreign key constraint\n")
+
+    # Step 2: Drop columns (data already in Collection)
+    print("Step 2: Dropping duplicate columns...")
+    op.drop_column('channel', 'name')
+    print("  ✓ Dropped channel.name")
+    op.drop_column('channel', 'directory')
+    print("  ✓ Dropped channel.directory")
+    op.drop_column('channel', 'tag_id')
+    print("  ✓ Dropped channel.tag_id\n")
+
+    print("=" * 60)
+    print("✓ Channel Cleanup Complete")
+    print("  Channels now delegate name/directory/tag to Collection")
+    print("=" * 60 + "\n")
+
+
+def downgrade():
+    # Re-add the columns
+    op.add_column('channel', sa.Column('name', sa.String(), nullable=True))
+    op.add_column('channel', sa.Column('directory', sa.String(), nullable=True))
+    op.add_column('channel', sa.Column('tag_id', sa.Integer(), nullable=True))
+
+    # Re-add foreign key
+    op.create_foreign_key('channel_tag_id_fkey', 'channel', 'tag', ['tag_id'], ['id'], ondelete='CASCADE')
+
+    # Restore data from Collection
+    bind = op.get_bind()
+    bind.execute(text("""
+        UPDATE channel
+        SET name = collection.name,
+            directory = collection.directory,
+            tag_id = collection.tag_id
+        FROM collection
+        WHERE channel.collection_id = collection.id
+    """))
