diff --git a/iridescent-ivies/.github/workflows/lint.yaml b/iridescent-ivies/.github/workflows/lint.yaml
new file mode 100644
index 00000000..7f67e803
--- /dev/null
+++ b/iridescent-ivies/.github/workflows/lint.yaml
@@ -0,0 +1,35 @@
+# GitHub Action workflow enforcing our code style.
+
+name: Lint
+
+# Trigger the workflow on both push (to the main repository, on the main branch)
+# and pull requests (against the main repository, but from any repo, from any branch).
+on:
+ push:
+ branches:
+ - main
+ pull_request:
+
+# Brand new concurrency setting! This ensures that not more than one run can be triggered for the same commit.
+# It is useful for pull requests coming from the main repository since both triggers will match.
+concurrency: lint-${{ github.sha }}
+
+jobs:
+ lint:
+ runs-on: ubuntu-latest
+
+ env:
+ # The Python version your project uses. Feel free to change this if required.
+ PYTHON_VERSION: "3.12"
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Set up Python ${{ env.PYTHON_VERSION }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ env.PYTHON_VERSION }}
+
+ - name: Run pre-commit hooks
+ uses: pre-commit/action@v3.0.1
diff --git a/iridescent-ivies/.gitignore b/iridescent-ivies/.gitignore
new file mode 100644
index 00000000..08ef4ac4
--- /dev/null
+++ b/iridescent-ivies/.gitignore
@@ -0,0 +1,32 @@
+# Files generated by the interpreter
+__pycache__/
+*.py[cod]
+
+# Environment specific
+.venv
+venv
+.env
+env
+
+# Unittest reports
+.coverage*
+
+# Logs
+*.log
+
+# PyEnv version selector
+.python-version
+
+# Built objects
+*.so
+dist/
+build/
+uv.lock
+# IDEs
+# PyCharm
+.idea/
+# VSCode
+.vscode/
+# MacOS
+.DS_Store
+src/sql_bsky.egg-info/*
diff --git a/iridescent-ivies/.pre-commit-config.yaml b/iridescent-ivies/.pre-commit-config.yaml
new file mode 100644
index 00000000..de7f5fd9
--- /dev/null
+++ b/iridescent-ivies/.pre-commit-config.yaml
@@ -0,0 +1,28 @@
+repos:
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v5.0.0
+ hooks:
+ - id: check-toml
+ - id: check-yaml
+ - id: end-of-file-fixer
+ - id: trailing-whitespace
+ args: [--markdown-linebreak-ext=md]
+
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.12.2
+ hooks:
+ - id: ruff-check
+ - id: ruff-format
+
+ - repo: https://github.com/astral-sh/uv-pre-commit
+ rev: 0.8.8
+ hooks:
+ - id: pip-compile
+ args: [
+ "--universal",
+ "--python-version=3.12",
+ "pyproject.toml",
+ "--group=dev",
+ "-o",
+ "requirements.txt"
+ ]
diff --git a/iridescent-ivies/LICENSE.txt b/iridescent-ivies/LICENSE.txt
new file mode 100644
index 00000000..2f024be1
--- /dev/null
+++ b/iridescent-ivies/LICENSE.txt
@@ -0,0 +1,7 @@
+Copyright 2021 Iridescent Ivies
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/iridescent-ivies/README.md b/iridescent-ivies/README.md
new file mode 100644
index 00000000..97fa38d2
--- /dev/null
+++ b/iridescent-ivies/README.md
@@ -0,0 +1,131 @@
+## The Social Query Language (SQL-BSky)
+
+[Python](https://python.org)
+[Bluesky](https://bsky.app)
+[MIT License](LICENSE.txt)
+
+A retro terminal-style SQL interface for querying the BlueSky social network. Experience social media through the lens of structured query language with authentic CRT visual effects.
+
+![The SQL-BSky terminal booting up](assets/Init_sql_app.gif)
+
+## Features
+
+- **Dual Authentication**: Full BlueSky login or anonymous "stealth mode"
+- **Public API Access**: Query public content without authentication
+- **ASCII Art Images**: View embedded images as beautiful ASCII art
+- **Real-time Validation**: Live SQL syntax checking as you type
+- **Retro CRT Interface**: Authentic 1980s terminal experience with visual effects
+- **Fast Performance**: Optimized queries with scrolling support
+- **Easter Eggs**: Hidden surprises for the adventurous
+
+## Quick Start
+
+### Installation
+
+1. Clone the repository:
+ ```bash
+ git clone git@github.com:A5rocks/code-jam-12.git
+
+ # move to the dir
+ cd code-jam-12
+ ```
+2. Start the development server:
+ ```bash
+ python3 dev.py
+ ```
+
+3. That's it! Open your browser to: [http://localhost:8000](http://localhost:8000)
+
+### First Steps
+
+1. **Choose Authentication Mode**:
+ - **Authenticated**: Login with BlueSky credentials for full access
+ - **Stealth Mode**: Browse public content anonymously
+
+> [!NOTE]
+> If the page is slow, try disabling the CRT effect at this point.
+
+2. **Try Your First Query**:
+ ```sql
+ SELECT * FROM tables
+ ```
+
+   ![Running the first query](assets/run_test_query.gif)
+
+3. **Explore Public Profiles**:
+ ```sql
+ SELECT * FROM profile WHERE actor = 'bsky.app'
+ ```
+
+## Query Reference
+
+### Available Tables
+
+| Table | Description | Auth Required | Parameters |
+|-------|-------------|---------------|------------|
+| `tables` | List all available tables | No | None |
+| `profile` | User profile information | No | `actor` (optional) |
+| `feed` | Posts from a specific user | No | `author` (required) |
+| `timeline` | Your personal timeline | Yes | None |
+| `suggestions` | Suggested users to follow | No | None |
+| `suggested_feed` | Recommended feeds | No | None |
+| `followers` | User's followers | No | `actor` (required) |
+| `following` | Who user follows | No | `actor` (required) |
+| `mutuals` | Mutual connections | No | `actor` (required) |
+| `likes` | User's liked posts | Yes | `actor` (required) |
+
+### Example Queries
+
+```sql
+SELECT * FROM feed WHERE author='bsky.app'
+```
+- This will get all fields from all posts from the author's feed
+
+```sql
+SELECT description FROM followers WHERE actor='bsky.app'
+```
+- This will get the bio of each follower of the given actor
+
+```sql
+SELECT * FROM tables
+```
+- This will get all available table names
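+
+The parser also understands a `LIMIT` clause (see `src/core/parser.py`), so you can cap how many rows come back; for example:
+
+```sql
+SELECT * FROM suggestions LIMIT 10
+```
+- This should return at most 10 suggested users (when no `LIMIT` is given, the default is 50)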
+
+## Known Issues
+
+> [!WARNING]
+> Please be aware of these current limitations before using the application.
+
+> [!NOTE]
+> Queries to non-existent tables or fields will return empty rows instead of proper error messages.
+
+**Example:**
+```sql
+-- Both of these return empty rows (same behavior)
+SELECT likes FROM feed WHERE author = 'bsky.app'
+SELECT apples FROM feed WHERE author = 'bsky.app'
+```
+
+### KeyError in Feed Processing
+> [!IMPORTANT]
+> There is a known KeyError: the code looks up a `"feeds"` key where the API actually returns `"feed"`. This is a human error we only discovered after the Code Jam programming time had ended, so we weren't able to fix it, but we're aware of the issue and it may cause `likes`-related queries to fail unexpectedly.
+
+### Table `likes` Not Functional
+> [!CAUTION]
+> The `likes` table is currently broken and behaves like a non-existent table. This is due to the KeyError described above.
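+
+For reference, here is a minimal sketch of the suspected one-line fix in `src/core/functions.py` (untested, written after the jam ended; it assumes the likes response uses the `feed` key, as described above):
+
+```python
+# in processor(), "likes" branch
+feed = await window.session.get_actor_likes(api[2], limit=limit)
+val = feed["feed"]  # was: feed["feeds"], which raises KeyError
+```
+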
+## Team - Iridescent Ivies
+
+- **A5rocks** - [GitHub](https://github.com/A5rocks) (Team Leader)
+- **TheHeretic** - [GitHub](https://github.com/DannyTheHeretic)
+- **Walkercito** - [GitHub](https://github.com/Walkercito)
+- **Joshdtbx** - [GitHub](https://github.com/giplgwm)
+- **Mimic** - [GitHub](https://github.com/Drakariboo)
+
+## License
+
+This project is licensed under the MIT License - see the [LICENSE.txt](LICENSE.txt) file for details.
+
+---
+
+**Thank you for exploring our project!!**
diff --git a/iridescent-ivies/assets/Init_sql_app.gif b/iridescent-ivies/assets/Init_sql_app.gif
new file mode 100644
index 00000000..f0c6634d
Binary files /dev/null and b/iridescent-ivies/assets/Init_sql_app.gif differ
diff --git a/iridescent-ivies/assets/run_test_query.gif b/iridescent-ivies/assets/run_test_query.gif
new file mode 100644
index 00000000..a1adab67
Binary files /dev/null and b/iridescent-ivies/assets/run_test_query.gif differ
diff --git a/iridescent-ivies/dev.py b/iridescent-ivies/dev.py
new file mode 100644
index 00000000..95f5e3e4
--- /dev/null
+++ b/iridescent-ivies/dev.py
@@ -0,0 +1,29 @@
+import http.server
+import os
+import socketserver
+import sys
+from pathlib import Path
+
+# use src as start point
+src_dir = Path(__file__).parent / "src"
+if src_dir.exists():
+ os.chdir(src_dir)
+ print(f"[*] Serving from: {src_dir.absolute()}")
+else:
+ print("[-] src/ dir not found")
+ sys.exit(1)
+
+PORT = 8000
+Handler = http.server.SimpleHTTPRequestHandler
+
+try:
+ with socketserver.TCPServer(("", PORT), Handler) as httpd:
+ print(f"[*] Server running at: http://localhost:{PORT}")
+ print(f"[*] Open: http://localhost:{PORT}/")
+ print("[-] Press Ctrl+C to stop")
+ httpd.serve_forever()
+except KeyboardInterrupt:
+ print("\nServer stopped")
+except OSError as e:
+ print(f"[-] Error: {e}")
+    print(f"[-] Port {PORT} may already be in use; stop the conflicting process or change PORT in dev.py")
diff --git a/iridescent-ivies/pyproject.toml b/iridescent-ivies/pyproject.toml
new file mode 100644
index 00000000..f4c1c29c
--- /dev/null
+++ b/iridescent-ivies/pyproject.toml
@@ -0,0 +1,54 @@
+[project]
+name = "sql-bsky"
+description = "Social query language"
+version = "0.1.0"
+readme = "README.md"
+requires-python = ">=3.12"
+dependencies = [
+ "ascii-magic>=2.3.0",
+]
+
+[dependency-groups]
+dev = [
+ "pre-commit~=4.2.0",
+ "ruff~=0.12.2",
+ "pytest",
+]
+
+[tool.ruff]
+line-length = 119
+target-version = "py312"
+fix = true
+src = ["src"]
+
+[tool.ruff.lint]
+# Enable all linting rules.
+select = ["ALL"]
+# Ignore some of the most obnoxious linting errors.
+ignore = [
+ # Missing docstrings.
+ "D100",
+ "D104",
+ "D105",
+ "D106",
+ "D107",
+ # Docstring whitespace.
+ "D203",
+ "D213",
+ # Docstring punctuation.
+ "D415",
+ # Docstring quotes.
+ "D301",
+ # Builtins.
+ "A",
+ # Print statements.
+ "T20",
+ # TODOs.
+ "TD002",
+ "TD003",
+ "FIX",
+ # Conflicts with ruff format.
+ "COM812",
+ # Asserts are good, actually.
+ "S101",
+]
diff --git a/iridescent-ivies/requirements.txt b/iridescent-ivies/requirements.txt
new file mode 100644
index 00000000..3721e7c9
--- /dev/null
+++ b/iridescent-ivies/requirements.txt
@@ -0,0 +1,40 @@
+# This file was autogenerated by uv via the following command:
+# uv pip compile --universal --python-version=3.12 pyproject.toml --group=dev -o requirements.txt
+ascii-magic==2.3.0
+ # via sql-bsky (pyproject.toml)
+cfgv==3.4.0
+ # via pre-commit
+colorama==0.4.6
+ # via
+ # ascii-magic
+ # pytest
+distlib==0.4.0
+ # via virtualenv
+filelock==3.18.0
+ # via virtualenv
+identify==2.6.12
+ # via pre-commit
+iniconfig==2.1.0
+ # via pytest
+nodeenv==1.9.1
+ # via pre-commit
+packaging==25.0
+ # via pytest
+pillow==11.3.0
+ # via ascii-magic
+platformdirs==4.3.8
+ # via virtualenv
+pluggy==1.6.0
+ # via pytest
+pre-commit==4.2.0
+ # via sql-bsky (pyproject.toml:dev)
+pygments==2.19.2
+ # via pytest
+pytest==8.4.1
+ # via sql-bsky (pyproject.toml:dev)
+pyyaml==6.0.2
+ # via pre-commit
+ruff==0.12.8
+ # via sql-bsky (pyproject.toml:dev)
+virtualenv==20.33.1
+ # via pre-commit
diff --git a/iridescent-ivies/src/__init__.py b/iridescent-ivies/src/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/iridescent-ivies/src/api/__init__.py b/iridescent-ivies/src/api/__init__.py
new file mode 100644
index 00000000..4b9a314e
--- /dev/null
+++ b/iridescent-ivies/src/api/__init__.py
@@ -0,0 +1 @@
+# The main portion of fetching and receiving from Bsky
diff --git a/iridescent-ivies/src/api/auth_session.py b/iridescent-ivies/src/api/auth_session.py
new file mode 100644
index 00000000..e5f6d9e5
--- /dev/null
+++ b/iridescent-ivies/src/api/auth_session.py
@@ -0,0 +1,276 @@
+# Imports
+import json
+from typing import Literal
+
+from pyodide.http import FetchResponse, pyfetch # The system we will actually use
+
+LIMIT = 50 # The default limit amount
+
+
+class PyfetchSession:
+ """Pyfetch Session, emulating the request Session."""
+
+ def __init__(self, headers: dict | None = None) -> None:
+ """Pyfetch Session, emulating the request Session."""
+ self.default_headers = headers or {}
+
+ async def get(self, url: str, headers: dict | None = None) -> FetchResponse:
+ """Get request for the pyfetch.
+
+ Args:
+ url (str): The Endpoint to hit
+            headers (dict | None, optional): Any headers that will get added to the request. Defaults to None.
+
+ Returns:
+ FetchResponse: The return data from the request
+
+ """
+ merged_headers = self.default_headers.copy()
+ if headers:
+ merged_headers.update(headers)
+ return await pyfetch(
+ url,
+ method="GET",
+ headers=merged_headers,
+ )
+
+ async def post(
+ self,
+ url: str,
+ data: str | dict | None = "",
+ headers: dict | None = None,
+ ) -> FetchResponse:
+ """Post request.
+
+ Args:
+ url (str): The Endpoint to hit
+ data (str | dict | None, optional): A dictionary or string to use for the body. Defaults to "".
+            headers (dict | None, optional): Any headers that will get added to the request. Defaults to None.
+
+ Returns:
+ FetchResponse: The return data from the request
+
+ """
+ merged_headers = self.default_headers.copy()
+ if headers:
+ merged_headers.update(headers)
+ return await pyfetch(
+ url,
+ method="POST",
+ headers=merged_headers,
+ body=json.dumps(data) if isinstance(data, dict) else data,
+ )
+
+
+class BskySession:
+ """Class to establish an auth session."""
+
+ def __init__(self, username: str, password: str) -> None:
+ # Bluesky credentials
+ self.username = username
+ self.password = password
+ self.pds_host = "https://public.api.bsky.app"
+ # Instance client
+ self.client = PyfetchSession()
+ # Access token
+ self.access_jwt = None
+ # Refresh token
+ self.refresh_jwt = None
+
+    async def login(self) -> bool:
+        """Create an authenticated session and save the tokens, returning True on success."""
+ endpoint: str = "https://bsky.social/xrpc/com.atproto.server.createSession"
+ session_info: FetchResponse = await self.client.post(
+ endpoint,
+ headers={"Content-Type": "application/json"},
+ data={
+ "identifier": self.username,
+ "password": self.password,
+ },
+ )
+ session_info: dict = await session_info.json()
+ try:
+ self.access_jwt: str = session_info["accessJwt"]
+ self.refresh_jwt: str = session_info["refreshJwt"]
+ self.did: str = session_info["did"]
+ self.handle: str = session_info["handle"]
+ self.pds_host = "https://bsky.social"
+ self.client.default_headers.update(
+ {
+ "Content-Type": "application/json",
+ "Authorization": f"Bearer {self.access_jwt}",
+ },
+ )
+ except KeyError:
+ # TODO: Handle the error on the front end
+ return False
+ else:
+ return True
+
+ async def refresh_token(self) -> None:
+ """Refresh the token."""
+ endpoint = f"{self.pds_host}/xrpc/com.atproto.server.refreshSession"
+
+ session_info = await self.client.post(
+ endpoint, data="", headers={"Authorization": f"Bearer {self.refresh_jwt}"}
+ )
+ session_info = await session_info.json()
+ self.access_jwt = session_info["accessJwt"]
+ self.refresh_jwt = session_info["refreshJwt"]
+ self.did = session_info["did"]
+
+ self.client.default_headers.update(
+ {
+ "Content-Type": "application/json",
+ "Authorization": f"Bearer {self.access_jwt}",
+ },
+ )
+
+ ### Start of the actual endpoints -> https://docs.bsky.app/docs/api/at-protocol-xrpc-api
+ async def get_preferences(self) -> dict:
+        """Get the logged-in user's preferences."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.actor.getPreferences"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+    async def get_profile(self, actor: str | None) -> dict:
+ """Get a user profile."""
+ # If no actor specified and we're authenticated, use our handle
+ if actor is None:
+ if hasattr(self, "handle") and self.handle:
+ actor = self.handle
+ else:
+ # Return special error object for stealth mode
+ return {"stealth_error": True}
+
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.actor.getProfile?actor={actor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_suggestions(self, limit: int = LIMIT, cursor: str = "") -> dict:
+        """Get the logged-in user's suggestions."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.actor.getSuggestions?limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def search_actors(self, q: str, limit: int = LIMIT, cursor: str = "") -> dict:
+ """Search for actors."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.actor.searchActors?q={q}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_actor_likes(self, actor: str, limit: int = LIMIT, cursor: str = "") -> dict: # Requires Auth
+        """Get a given actor's likes."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.feed.getActorLikes?actor={actor}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_author_feed(self, actor: str, limit: int = LIMIT) -> dict:
+        """Get a specific user's feed."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.feed.getAuthorFeed?actor={actor}&limit={limit}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_feed(self, feed: str, limit: int = LIMIT, cursor: str = "") -> dict:
+ """Get a specified feed."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.feed.getFeed?feed={feed}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_suggested_feeds(self, limit: int = LIMIT, cursor: str = "") -> dict:
+ """Get suggested feeds."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.feed.getSuggestedFeeds?limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_timeline(self) -> dict:
+        """Get the logged-in user's timeline."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.feed.getTimeline"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ # Only function that needs this many params, I am not making a data class for it
+ async def search_posts( # noqa: PLR0913
+ self,
+ q: str,
+ limit: int = LIMIT,
+ sort: Literal["top", "latest"] = "latest",
+ since: str = "",
+ until: str = "",
+ mentions: str = "",
+ author: str = "",
+ tag: str = "",
+ cursor: str = "",
+ ) -> dict:
+ """Search for bluesky posts.
+
+ Args:
+ q (str): the given query
+ sort (Literal["top", "latest"], optional): The sort Order. Defaults to "latest".
+ since (str, optional): Since when in YYYY-MM-DD format. Defaults to "".
+ until (str, optional): Until when in YYYY-MM-DD format. Defaults to "".
+ mentions (str, optional): Post mentions the given account. Defaults to "".
+ author (str, optional): Author of a given post. Defaults to "".
+ tag (str, optional): Tags on the post. Defaults to "".
+ limit (int, optional): Limit the number returned. Defaults to LIMIT.
+ cursor (str, optional): Bsky Cursor. Defaults to "".
+
+ """
+ endpoint = (
+ f"{self.pds_host}/xrpc/app.bsky.feed.searchPosts"
+ f"?q={q}&sort={sort}&since={since}&until={until}"
+ f"&mentions={mentions}&author={author}&tag={tag}"
+ f"&limit={limit}&cursor={cursor}"
+ )
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_followers(self, actor: str, limit: int = LIMIT, cursor: str = "") -> dict:
+        """Get a user's followers."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.graph.getFollowers?actor={actor}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_follows(self, actor: str, limit: int = LIMIT, cursor: str = "") -> dict:
+        """Get a user's follows."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.graph.getFollows?actor={actor}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_mutual_follows(self, actor: str, limit: int = LIMIT, cursor: str = "") -> dict:
+        """Get a user's mutual follows."""
+ endpoint = f"{self.pds_host}/xrpc/app.bsky.graph.getKnownFollowers?actor={actor}&limit={limit}&cursor={cursor}"
+ response = await self.client.get(
+ endpoint,
+ )
+ return await response.json()
+
+ async def get_blob(self, url: str) -> str:
+        """Build the URL for a specific blob."""
+ did, cid = url.split("/")[-2:]
+ cid = cid.split("@")[0]
+ return f"https://bsky.social/xrpc/com.atproto.sync.getBlob?did={did}&cid={cid}"
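+
+# Rough usage sketch (illustrative only; the handle and app password below are placeholders):
+#
+#     session = BskySession("alice.bsky.social", "app-password")
+#     if await session.login():
+#         profile = await session.get_profile("bsky.app")
+#         followers = await session.get_followers("bsky.app", limit=10)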
diff --git a/iridescent-ivies/src/core/__init__.py b/iridescent-ivies/src/core/__init__.py
new file mode 100644
index 00000000..446c6f0e
--- /dev/null
+++ b/iridescent-ivies/src/core/__init__.py
@@ -0,0 +1 @@
+# The core of the Social Query Language
diff --git a/iridescent-ivies/src/core/functions.py b/iridescent-ivies/src/core/functions.py
new file mode 100644
index 00000000..c4a31046
--- /dev/null
+++ b/iridescent-ivies/src/core/functions.py
@@ -0,0 +1,374 @@
+"""The main script file for Pyodide."""
+
+import frontend
+from frontend import CLEAR_BUTTON, EXECUTE_BUTTON, QUERY_INPUT, clear_interface, update_table
+from js import Event, document, window
+from parser import Parent, ParentKind, Token, TokenKind, Tree, parse, tokenize
+from pyodide.ffi import create_proxy
+from pyodide.ffi.wrappers import set_timeout
+
+
+def flatten_response(data: dict) -> dict:
+ """Flatten a dictionary."""
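+    # Example (illustrative): {"post": {"author": {"handle": "x"}}} flattens to {"post_author_handle": "x"};
+    # nested keys are joined with "_" and lowercased.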
+ flattened_result = {}
+
+ def _flatten(current: dict, name: str = "") -> dict:
+ if isinstance(current, dict):
+ for field, value in current.items():
+ _flatten(value, name + field + "_")
+        elif isinstance(current, list):
+            # Lists are intentionally skipped; the old index-based flattening was removed.
+            pass
+ else:
+ flattened_result[name[:-1].lower()] = current # Drops the extra _
+
+ _flatten(data)
+ return flattened_result
+
+
+def blue_screen_of_death() -> None:
+ """Easter Egg: Show WinXP Blue Screen of Death."""
+ input_field = document.getElementById("query-input")
+ if input_field:
+ input_field.value = ""
+
+ bsod = document.createElement("div")
+ bsod.className = "bsod-overlay"
+    bsod.innerHTML = (
+        "<div>"
+        "<p>A problem has been detected and Windows has been shut down to prevent damage "
+        "to your computer.</p>"
+        "<p>IRQL_NOT_LESS_OR_EQUAL</p>"
+        "<p>If this is the first time you've seen this stop error screen, "
+        "restart your computer. If this screen appears again, follow these steps:</p>"
+        "<p>Check to make sure any new hardware or software is properly installed. "
+        "If this is a new installation, ask your hardware or software manufacturer "
+        "for any Windows updates you might need.</p>"
+        "<p>If problems continue, disable or remove any newly installed hardware or software. "
+        "Disable BIOS memory options such as caching or shadowing. If you need to use "
+        "Safe Mode to remove or disable components, restart your computer, press F8 "
+        "to select Advanced Startup Options, and then select Safe Mode.</p>"
+        "<p>*** Address 804F9319 base at 804D7000, DateStamp 3844d96e - ntoskrnl.exe</p>"
+        "<p>Beginning dump of physical memory<br>"
+        "Physical memory dump complete.<br>"
+        "Contact your system administrator or technical support group for further assistance.</p>"
+        "</div>"
+    )
+
+ document.body.appendChild(bsod)
+ frontend.flash_screen("#0000ff", 100)
+
+ def remove_bsod() -> None:
+ if bsod.parentNode:
+ document.body.removeChild(bsod)
+ frontend.update_status("System recovered from critical error", "warning")
+ frontend.trigger_electric_wave()
+
+ set_timeout(create_proxy(remove_bsod), 4000)
+
+
+def clean_value(text: str) -> str:
+ """Remove surrounding single/double quotes if present."""
+    if isinstance(text, str) and text[:1] == text[-1:] and text[:1] in ("'", '"'):
+ return text[1:-1]
+ return text
+
+
+def get_text(node: Tree) -> str:
+ """Recursively get the string value from a node (Parent or Token)."""
+ if hasattr(node, "text"):
+ return node.text
+ if hasattr(node, "children"):
+ return " ".join(get_text(child) for child in node.children)
+ return str(node)
+
+
+def walk_where(node: Tree) -> list[tuple | str]:
+ """Flatten sql expressions into [tuple, 'AND', tuple, ...]."""
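+    # Example (illustrative):
+    #   WHERE actor = 'a' AND likes > 10  ->  [("actor", "=", "a"), "AND", ("likes", ">", "10")]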
+    if getattr(node, "kind", None) is ParentKind.EXPR_BINARY:
+ left, op, right = node.children
+ op_text = getattr(op, "text", None)
+
+ if op_text in ("AND", "OR"):
+ return [*walk_where(left), op_text, *walk_where(right)]
+
+ return [(clean_value(get_text(left)), op_text, clean_value(get_text(right)))]
+
+ if hasattr(node, "children"):
+ result = []
+ for child in node.children:
+ result.extend(walk_where(child))
+ return result
+
+ return []
+
+
+def get_limit(node: Tree) -> int | None:
+ """Get what the LIMIT clause of this SQL query contains."""
+ assert node.kind is ParentKind.FILE
+ stmt = node.children[0]
+ for it in stmt.children:
+ if it.kind is ParentKind.LIMIT_CLAUSE:
+ return int(it.children[1].text)
+
+ return None
+
+
+def extract_where(tree: Tree) -> list[tuple | str]:
+ """Extract the where clause from the tree."""
+ if not tree.kind == ParentKind.FILE:
+ raise ValueError
+ stmt = tree.children[0]
+ for c in stmt.children:
+ if c.kind == ParentKind.WHERE_CLAUSE:
+ return walk_where(c.children[1])
+ return []
+
+
+def extract_fields(tree: Tree) -> list[Tree]:
+ """Extract the fields from the tree."""
+ if not tree.kind == ParentKind.FILE:
+ raise ValueError
+ stmt = tree.children[0]
+ for c in stmt.children:
+ if c.kind == ParentKind.FIELD_LIST:
+ return c.children[::2]
+ return []
+
+
+def extract_table(tree: Tree) -> str:
+ """Extract the Table from the tree."""
+ if tree.kind != ParentKind.FILE:
+ raise ValueError
+
+ stmt = tree.children[0] # SELECT_STMT
+ for c in stmt.children:
+ if c.kind == ParentKind.FROM_CLAUSE:
+ for child in c.children:
+ if child.kind == TokenKind.IDENTIFIER:
+ return child.text
+ break
+ return ""
+
+
+async def parse_input(_: Event) -> None:
+ """Start of the parser."""
+    query = QUERY_INPUT.value.strip()
+    if not query:
+        return
+
+ clean_query = query.upper().replace(";", "").replace(",", "").strip()
+ if "DROP TABLE USERS" in clean_query:
+ frontend.update_status("what could go wrong?", "warning")
+ blue_screen_of_death()
+ return
+
+ tree = parse(tokenize(query))
+ if not check_query(tree):
+ return
+
+ await sql_to_api_handler(tree)
+
+
+async def processor(api: tuple[str, str], table: str, limit: int | None) -> dict: # noqa: C901, PLR0912, PLR0915
+    """Process the SQL statement into an API call and return the response rows."""
+ val = {}
+ if table == "feed":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_author_feed(api[2], limit=limit)
+ val = feed["feed"]
+ elif api[0] == "feed":
+ feed = await window.session.get_feed(api[2], limit=limit)
+ val = feed["feed"]
+
+ elif table == "timeline":
+ feed = await window.session.get_timeline()
+ val = feed["feed"]
+ elif table == "profile":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_profile(api[2])
+ val = feed
+ else:
+ feed = await window.session.get_profile(None)
+ if isinstance(feed, dict) and feed.get("stealth_error"):
+ return "stealth_error"
+ val = feed
+ elif table == "suggestions":
+ feed = await window.session.get_suggestions(limit=limit)
+ val = feed["actors"]
+ elif table == "suggested_feed":
+ feed = await window.session.get_suggested_feeds(limit=limit)
+ val = feed["feeds"]
+ elif table == "likes":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_actor_likes(api[2], limit=limit)
+            val = feed["feeds"]  # Known issue: the API returns "feed" here, so this raises KeyError (see README)
+ else:
+ pass
+ elif table == "followers":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_followers(api[2], limit=limit)
+ val = feed["followers"]
+ else:
+ pass
+ elif table == "following":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_follows(api[2], limit=limit)
+ val = feed["followers"]
+ else:
+ pass
+ elif table == "mutuals":
+ if api[0] in ["actor", "author"]:
+ feed = await window.session.get_mutual_follows(api[2], limit=limit)
+ val = feed["followers"]
+ else:
+ pass
+ elif table == "tables":
+ val = [
+ {"Table_Name": _}
+ for _ in [
+ "feed",
+ "timeline",
+ "profile",
+ "suggestions",
+ "suggested_feed",
+ "likes",
+ "followers",
+ "following",
+ "mutuals",
+ ]
+ ]
+ return val
+
+
+def _extract_images_from_post(data: dict) -> str:
+ """Extract any embedded images from a post and return them as a delimited string."""
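+    # Output format (illustrative): "thumb,fullsize,alt | thumb,fullsize,alt" -- one entry per embedded image.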
+ if not isinstance(data, dict):
+ return ""
+
+ if "post" not in data:
+ return ""
+
+ post = data["post"]
+
+ # Check if the post has embedded content
+ if "embed" not in post:
+ return ""
+
+ embed_type = post["embed"].get("$type", "")
+
+ # Only process image embeds
+ if embed_type != "app.bsky.embed.images#view":
+ return ""
+
+ images = post["embed"].get("images", [])
+ if not images:
+ return ""
+
+ image_links = []
+ for image in images:
+ image_link = f"{image['thumb']},{image['fullsize']},{image['alt']}"
+ image_links.append(image_link)
+
+ return " | ".join(image_links)
+
+
+async def sql_to_api_handler(tree: Tree) -> dict | list:
+ """Handle going from SQL to the API."""
+ where_expr = extract_where(tree)
+ table = extract_table(tree)
+ fields = extract_fields(tree)
+ field_tokens = [i.children[0] for i in fields if i.kind != TokenKind.STAR]
+
+ for i in where_expr:
+ if i[0] in ["actor", "author", "feed"]:
+ api = i
+ break
+ else:
+ # No Where Expression Matches
+ api = ["", ""]
+
+ limit = get_limit(tree)
+ val = await processor(api, table, limit if limit is not None else 50)
+ if not val:
+ frontend.show_empty_table()
+ frontend.update_status(f"Error getting from {table}. Try: SELECT * FROM tables", "error") # noqa: S608 Not sql injection
+ frontend.trigger_electric_wave()
+ return {}
+
+ # Handle stealth mode error for profile queries
+ if val == "stealth_error":
+ frontend.show_empty_table()
+ frontend.update_status(
+            "Cannot get own profile in stealth mode. Try: SELECT * FROM profile WHERE actor = 'username.bsky.social'",
+ "warning",
+ )
+ frontend.trigger_electric_wave()
+ return {}
+
+ if isinstance(val, dict):
+ val = [val]
+
+ tb = document.getElementById("table-body")
+ tb.innerHTML = ""
+ head = []
+ if field_tokens:
+ head = [j.text for j in field_tokens]
+ body = []
+
+ for i in val:
+ data = i
+
+ # Only try to extract images if the data structure supports it
+ images = _extract_images_from_post(data)
+ if images and "post" in data:
+ data["post"]["images"] = images
+
+ d = flatten_response(data)
+
+ if field_tokens:
+ body.append({j: d.get(j.lower(), "") for j in head})
+ else:
+ body.append(d)
+            for k in d:
+                if k not in head:
+                    head.append(k)
+
+ update_table(head, body)
+ frontend.update_status(f"Data successfully retrieved from {table}", "success")
+ return val
+
+
+async def check_query_input(_: Event) -> None:
+ """Check the query that is currently input."""
+ check_query(parse(tokenize(QUERY_INPUT.value.strip())))
+
+
+def check_query(tree: Tree) -> bool:
+ """Check a given query and update the status bar."""
+ errors = []
+ _check_query(tree, errors)
+ if errors:
+ frontend.update_status("\n".join(errors), "error")
+ return False
+ frontend.update_status("Query is OK", "success")
+ return True
+
+
+def _check_query(tree: Tree, errors: list[str]) -> None:
+ """Check a given query recursively."""
+ errors.extend([f"- {error}" for error in tree.errors])
+ if isinstance(tree, Parent):
+ for child in tree.children:
+ _check_query(child, errors)
+ if tree.kind is ParentKind.ERROR_TREE:
+ errors.append("- large error")
+
+
+EXECUTE_BUTTON.addEventListener("click", create_proxy(parse_input))
+CLEAR_BUTTON.addEventListener("click", create_proxy(clear_interface))
+QUERY_INPUT.addEventListener("keydown", create_proxy(check_query_input))
diff --git a/iridescent-ivies/src/core/parser.py b/iridescent-ivies/src/core/parser.py
new file mode 100644
index 00000000..3599b690
--- /dev/null
+++ b/iridescent-ivies/src/core/parser.py
@@ -0,0 +1,559 @@
+from __future__ import annotations
+
+import string
+import textwrap
+from dataclasses import dataclass, field
+from enum import Enum, auto
+from typing import Literal
+
+
+# tokenizer:
+@dataclass
+class Token:
+ """A token produced by tokenization."""
+
+ kind: TokenKind
+ text: str
+ start_pos: int
+ end_pos: int
+ errors: list[str] = field(default_factory=list)
+
+
+class TokenKind(Enum):
+ """What the token represents."""
+
+ # keywords
+ SELECT = auto()
+ FROM = auto()
+ WHERE = auto()
+ LIMIT = auto()
+
+ # literals
+ STRING = auto()
+ INTEGER = auto()
+ IDENTIFIER = auto()
+ STAR = auto()
+
+ # operators
+ EQUALS = auto()
+ AND = auto()
+ GT = auto()
+ LT = auto()
+
+ # structure
+ COMMA = auto()
+ ERROR = auto()
+ EOF = auto() # this is a fake token only made and used in the parser
+
+
+KEYWORDS = {
+ "SELECT": TokenKind.SELECT,
+ "FROM": TokenKind.FROM,
+ "WHERE": TokenKind.WHERE,
+ "AND": TokenKind.AND,
+ "LIMIT": TokenKind.LIMIT,
+}
+
+
+@dataclass
+class Cursor:
+ """Helper class to allow peeking into a stream of characters."""
+
+ contents: str
+ index: int = 0
+
+ def peek(self) -> str:
+ """Look one character ahead in the stream."""
+ return self.contents[self.index : self.index + 1]
+
+ def next(self) -> str:
+ """Get the next character in the stream."""
+ c = self.peek()
+ if c != "":
+ self.index += 1
+ return c
+
+
+def tokenize(query: str) -> list[Token]: # noqa: PLR0912, C901
+ """Turn a query into a list of tokens."""
+ result = []
+
+ cursor = Cursor(query)
+ while True:
+ idx = cursor.index
+ char = cursor.next()
+
+ if char == "":
+ break
+
+ if char in string.ascii_letters:
+ char = cursor.peek()
+
+ while char in string.ascii_letters + "._":
+ cursor.next()
+ char = cursor.peek()
+ if char == "":
+ break
+
+ identifier = cursor.contents[idx : cursor.index]
+ kind = KEYWORDS.get(identifier, TokenKind.IDENTIFIER)
+ result.append(Token(kind, identifier, idx, cursor.index))
+
+ elif char in string.digits:
+ char = cursor.peek()
+
+ while char in string.digits:
+ cursor.next()
+ char = cursor.peek()
+ if char == "":
+ break
+
+ result.append(Token(TokenKind.INTEGER, cursor.contents[idx : cursor.index], idx, cursor.index))
+
+ elif char == ",":
+ result.append(Token(TokenKind.COMMA, ",", idx, cursor.index))
+
+ elif char == "*":
+ result.append(Token(TokenKind.STAR, "*", idx, cursor.index))
+
+ elif char == "'":
+ # idk escaping rules in SQL lol
+ char = cursor.peek()
+ while char != "'":
+ cursor.next()
+ char = cursor.peek()
+ if char == "":
+ break
+
+ cursor.next() # get the last '
+
+ string_result = cursor.contents[idx : cursor.index]
+ kind = TokenKind.STRING if string_result.endswith("'") and len(string_result) > 1 else TokenKind.ERROR
+ result.append(Token(kind, string_result, idx, cursor.index))
+
+ elif char == "=":
+ result.append(Token(TokenKind.EQUALS, "=", idx, cursor.index))
+
+ elif char == ">":
+ # TODO: gte?
+ result.append(Token(TokenKind.GT, ">", idx, cursor.index))
+
+ elif char == "<":
+ result.append(Token(TokenKind.LT, "<", idx, cursor.index))
+
+ return result
+
+
+# parser
+# heavily inspired by https://matklad.github.io/2023/05/21/resilient-ll-parsing-tutorial.html
+@dataclass
+class Parser:
+ """Helper class that provides useful parser functionality."""
+
+ contents: list[Token]
+ events: list[Event] = field(default_factory=list)
+ index: int = 0
+ unreported_errors: list[str] = field(default_factory=list)
+
+ def eof(self) -> bool:
+ """Check whether the token stream is done."""
+ return self.index == len(self.contents)
+
+ def peek(self) -> TokenKind:
+ """Look at the next kind of token in the stream."""
+ if self.eof():
+ return TokenKind.EOF
+ return self.contents[self.index].kind
+
+ def advance(self) -> None:
+ """Move to the next token in the stream."""
+ self.index += 1
+ self.events.append("ADVANCE")
+
+ def advance_with_error(self, error: str) -> None:
+ """Mark the current token as being wrong."""
+ if self.eof():
+ # this should probably be done better...
+ self.unreported_errors.append(error)
+ else:
+ self.contents[self.index].errors.append(error)
+ self.advance()
+
+ def open(self) -> int:
+ """Start nesting children."""
+ result = len(self.events)
+ self.events.append(("OPEN", ParentKind.ERROR_TREE))
+ return result
+
+ def open_before(self, index: int) -> int:
+ """Start nesting children before a given point."""
+ self.events.insert(index, ("OPEN", ParentKind.ERROR_TREE))
+ return index
+
+ def close(self, kind: ParentKind, where: int) -> int:
+ """Stop nesting children and note the tree type."""
+ self.events[where] = ("OPEN", kind)
+ self.events.append("CLOSE")
+ return where
+
+ def expect(self, kind: TokenKind, error: str) -> None:
+ """Ensure the next token is a specific kind and advance."""
+ if self.at(kind):
+ self.advance()
+ else:
+ self.advance_with_error(error)
+
+    def at(self, kind: TokenKind) -> bool:
+ """Check if the next token is a specific kind."""
+ return self.peek() == kind
+
+
+@dataclass
+class Parent:
+ """Syntax tree element with children."""
+
+ kind: ParentKind
+ children: list[Tree]
+ errors: list[str] = field(default_factory=list)
+
+
+class ParentKind(Enum):
+ """Kinds of syntax tree elements that have children."""
+
+ SELECT_STMT = auto()
+ ERROR_TREE = auto()
+ FIELD_LIST = auto()
+ FROM_CLAUSE = auto()
+ WHERE_CLAUSE = auto()
+ LIMIT_CLAUSE = auto()
+ EXPR_NAME = auto()
+ EXPR_STRING = auto()
+ EXPR_INTEGER = auto()
+ EXPR_BINARY = auto()
+ FILE = auto()
+
+
+Tree = Parent | Token
+Event = Literal["ADVANCE", "CLOSE"] | tuple[Literal["OPEN"], ParentKind]
+
+
+def turn_tokens_into_events(tokens: list[Token]) -> tuple[list[Event], list[str]]:
+    """Parse a token stream into a list of events, plus any errors that could not be attached to a token."""
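+    # Example (illustrative): for tokenize("SELECT *") this returns
+    # ([("OPEN", ParentKind.SELECT_STMT), "ADVANCE", ("OPEN", ParentKind.FIELD_LIST),
+    #   "ADVANCE", "CLOSE", "CLOSE"], []).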
+ parser = Parser(tokens, [])
+ while not parser.eof():
+ _parse_stmt(parser)
+ return parser.events, parser.unreported_errors
+
+
+def parse(tokens: list[Token]) -> Tree:
+ """Parse a token stream into a syntax tree."""
+ events, errors = turn_tokens_into_events(tokens)
+ stack = [("OPEN", ParentKind.FILE)]
+ events.append("CLOSE")
+
+ i = 0
+ for event in events:
+ if event == "ADVANCE":
+ stack.append(tokens[i])
+ i += 1
+ elif event == "CLOSE":
+ inner = []
+ while True:
+ e = stack.pop()
+ if isinstance(e, tuple) and e[0] == "OPEN":
+ inner.reverse()
+ stack.append(Parent(e[1], inner))
+ break
+ inner.append(e)
+ else:
+ assert isinstance(event, tuple)
+ assert event[0] == "OPEN"
+ stack.append(event)
+
+ assert i == len(tokens)
+ assert len(stack) == 1
+ result = stack[0]
+ assert isinstance(result, Tree)
+ assert result.kind == ParentKind.FILE
+ result.errors.extend(errors)
+ return result
+
+
+# free parser functions
+def _parse_stmt(parser: Parser) -> None:
+    # <select-stmt>
+ _parse_select_stmt(parser)
+
+
+def _parse_select_stmt(parser: Parser) -> None:
+    # 'SELECT' <field> [ ',' <field> ]* [ 'FROM' IDENTIFIER ] [ 'WHERE' <expr> ]
+ start = parser.open()
+ parser.expect(TokenKind.SELECT, "only SELECT is supported")
+
+ fields_start = parser.open()
+ _parse_field(parser)
+ while parser.at(TokenKind.COMMA):
+ parser.advance()
+ _parse_field(parser)
+ parser.close(ParentKind.FIELD_LIST, fields_start)
+
+ if parser.at(TokenKind.FROM):
+ # from clause
+ from_start = parser.open()
+ parser.advance()
+
+ parser.expect(TokenKind.IDENTIFIER, "expected to select from a table")
+ parser.close(ParentKind.FROM_CLAUSE, from_start)
+
+ if parser.at(TokenKind.WHERE):
+ # where clause
+ where_start = parser.open()
+ parser.advance()
+
+ _parse_expr(parser)
+ parser.close(ParentKind.WHERE_CLAUSE, where_start)
+
+ if parser.at(TokenKind.LIMIT):
+ limit_start = parser.open()
+ parser.advance()
+ parser.expect(TokenKind.INTEGER, "expected an integer")
+ parser.close(ParentKind.LIMIT_CLAUSE, limit_start)
+
+ parser.close(ParentKind.SELECT_STMT, start)
+
+
+def _parse_field(parser: Parser) -> None:
+    # '*' | <expr>
+ if parser.at(TokenKind.STAR):
+ parser.advance()
+ else:
+ _parse_expr(parser)
+
+
+def _parse_expr(parser: Parser) -> None:
+    # <small-expr> | <small-expr> = <small-expr>
+ _parse_expr_inner(parser, TokenKind.EOF)
+
+
+def _parse_expr_inner(parser: Parser, left_op: TokenKind) -> None:
+ left = _parse_small_expr(parser)
+
+ while True:
+ right_op = parser.peek()
+ if right_goes_first(left_op, right_op):
+ # if we have A B C ...,
+ # then we need to parse (A (B C ...))
+ outer = parser.open_before(left)
+ parser.advance()
+ _parse_expr_inner(parser, right_op) # (B C ...)
+ parser.close(ParentKind.EXPR_BINARY, outer)
+ else:
+ # (A B) C will be handled
+ # (if this were toplevel, right_goes_first will happen)
+ break
+
+
+def _parse_small_expr(parser: Parser) -> int:
+ # IDENTIFIER
+ # TODO: it looks like this parser.open() is unnecessary
+ start = parser.open()
+ if parser.at(TokenKind.IDENTIFIER):
+ parser.advance()
+ return parser.close(ParentKind.EXPR_NAME, start)
+ if parser.at(TokenKind.STRING):
+ parser.advance()
+ return parser.close(ParentKind.EXPR_STRING, start)
+ if parser.at(TokenKind.INTEGER):
+ parser.advance()
+ return parser.close(ParentKind.EXPR_INTEGER, start)
+ parser.advance_with_error("expected expression")
+ return parser.close(ParentKind.ERROR_TREE, start)
+
+
+TABLE = [[TokenKind.AND], [TokenKind.EQUALS, TokenKind.GT, TokenKind.LT]]
+
+
+def right_goes_first(left: TokenKind, right: TokenKind) -> bool:
+ """Understand which token type binds tighter.
+
+ We say that A B C is equivalent to:
+ - A (B C) if we return True
+ - (A B) C if we return False
+ """
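+    # For example (illustrative), right_goes_first(TokenKind.AND, TokenKind.EQUALS) is True,
+    # so "a = 'x' AND b = 'y'" groups as (a = 'x') AND (b = 'y').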
+ left_idx = next((i for i, r in enumerate(TABLE) if left in r), None)
+ right_idx = next((i for i, r in enumerate(TABLE) if right in r), None)
+
+ if right_idx is None:
+ # evaluate left-to-right
+ return False
+ if left_idx is None:
+ # well, maybe left doesn't exist?
+ assert left == TokenKind.EOF
+ return True
+
+ return right_idx > left_idx
+
+
+##### tests: (this should be moved to a proper tests folder)
+
+
+def check_tok(before: str, after: TokenKind) -> None:
+ """Test helper which checks a string tokenizes to a single given token kind."""
+ assert [tok.kind for tok in tokenize(before)] == [after]
+
+
+def stringify_tokens(query: str) -> str:
+ """Test helper which turns a query into a repr of the tokens.
+
+ Used for manual snapshot testing.
+ """
+ tokens = tokenize(query)
+ result = ""
+ for i, c in enumerate(query):
+ for tok in tokens:
+ if tok.end_pos == i:
+ result += "<"
+
+ for tok in tokens:
+ if tok.start_pos == i:
+ result += ">"
+
+ result += c
+
+    i = len(query)
+ for tok in tokens:
+ if tok.end_pos == i:
+ result += "<"
+
+ return result
+
+
+def _stringify_tree(tree: Tree) -> list[str]:
+ result = []
+ if isinstance(tree, Parent):
+ result.append(f"{tree.kind.name}")
+ result.extend(" " + line for child in tree.children for line in _stringify_tree(child))
+ else:
+ repr = f'{tree.kind.name} ("{tree.text}")'
+ if tree.errors:
+ repr += " -- "
+ repr += " / ".join(tree.errors)
+ result.append(repr)
+
+ return result
+
+
+def stringify_tree(tree: Tree) -> str:
+ """Test helper that turns a syntax tree into a representation of it.
+
+ Used for manual snapshot testing
+ """
+ assert not tree.errors
+ return "\n".join(_stringify_tree(tree))
+
+
+def test_simple_tokens() -> None:
+ """Tests that various things tokenize correct in minimal cases."""
+ assert [tok.kind for tok in tokenize("")] == []
+ check_tok("SELECT", TokenKind.SELECT)
+ check_tok("FROM", TokenKind.FROM)
+ check_tok("WHERE", TokenKind.WHERE)
+ check_tok("AND", TokenKind.AND)
+ check_tok("'hello :)'", TokenKind.STRING)
+ check_tok("12345", TokenKind.INTEGER)
+ check_tok(",", TokenKind.COMMA)
+ check_tok("*", TokenKind.STAR)
+ check_tok("username", TokenKind.IDENTIFIER)
+ check_tok("username_b", TokenKind.IDENTIFIER)
+
+
+def test_tokenize_simple_select() -> None:
+ """Tests that tokenization works in more general cases."""
+ assert stringify_tokens("SELECT * FROM posts") == ">SELECT< >*< >FROM< >posts<"
+
+
+def test_parse_simple() -> None:
+ """Tests that parsing works in some specific cases."""
+ assert (
+ stringify_tree(parse(tokenize("SELECT * FROM posts")))
+ == textwrap.dedent("""
+ FILE
+ SELECT_STMT
+ SELECT ("SELECT")
+ FIELD_LIST
+ STAR ("*")
+ FROM_CLAUSE
+ FROM ("FROM")
+ IDENTIFIER ("posts")
+ """).strip()
+ )
+
+ assert (
+ stringify_tree(parse(tokenize("SELECT * WHERE actor = 'aaa'")))
+ == textwrap.dedent("""
+ FILE
+ SELECT_STMT
+ SELECT ("SELECT")
+ FIELD_LIST
+ STAR ("*")
+ WHERE_CLAUSE
+ WHERE ("WHERE")
+ EXPR_BINARY
+ EXPR_NAME
+ IDENTIFIER ("actor")
+ EQUALS ("=")
+ EXPR_STRING
+ STRING ("'aaa'")
+ """).strip()
+ )
+
+ assert (
+ stringify_tree(parse(tokenize("SELECT 4 WHERE actor = 'a' AND likes > 10")))
+ == textwrap.dedent("""
+ FILE
+ SELECT_STMT
+ SELECT ("SELECT")
+ FIELD_LIST
+ EXPR_INTEGER
+ INTEGER ("4")
+ WHERE_CLAUSE
+ WHERE ("WHERE")
+ EXPR_BINARY
+ EXPR_BINARY
+ EXPR_NAME
+ IDENTIFIER ("actor")
+ EQUALS ("=")
+ EXPR_STRING
+ STRING ("'a'")
+ AND ("AND")
+ EXPR_BINARY
+ EXPR_NAME
+ IDENTIFIER ("likes")
+ GT (">")
+ EXPR_INTEGER
+ INTEGER ("10")
+ """).strip()
+ )
+
+ assert (
+ stringify_tree(parse(tokenize("SELECT 4 LIMIT 0")))
+ == textwrap.dedent("""
+ FILE
+ SELECT_STMT
+ SELECT ("SELECT")
+ FIELD_LIST
+ EXPR_INTEGER
+ INTEGER ("4")
+ LIMIT_CLAUSE
+ LIMIT ("LIMIT")
+ INTEGER ("0")
+ """).strip()
+ )
+
+
+if __name__ == "__main__":
+ query = input("query> ")
+ print(stringify_tokens(query))
+
+ print()
+ print(stringify_tree(parse(tokenize(query))))
diff --git a/iridescent-ivies/src/core/setup.py b/iridescent-ivies/src/core/setup.py
new file mode 100644
index 00000000..812eda31
--- /dev/null
+++ b/iridescent-ivies/src/core/setup.py
@@ -0,0 +1,34 @@
+"""The Setup Script for pyodide."""
+
+from pathlib import Path
+
+import micropip
+from pyodide.http import pyfetch
+
+
+async def setup_pyodide_scripts() -> None:
+ """Script to do everything for pyodide."""
+ response = await pyfetch("./core/functions.py")
+    with Path("functions.py").open("wb") as f:
+ f.write(await response.bytes())
+
+ response = await pyfetch("./core/parser.py")
+    with Path("parser.py").open("wb") as f:
+ f.write(await response.bytes())
+
+ response = await pyfetch("./ui/image_modal.py")
+    with Path("image_modal.py").open("wb") as f:
+ f.write(await response.bytes())
+
+ response = await pyfetch("./ui/frontend.py")
+    with Path("frontend.py").open("wb") as f:
+ f.write(await response.bytes())
+ await micropip.install("ascii_magic")
+
+ response = await pyfetch("./api/auth_session.py")
+    with Path("auth_session.py").open("wb") as f:
+ f.write(await response.bytes())
+
+ response = await pyfetch("./ui/auth_modal.py")
+    with Path("auth_modal.py").open("wb") as f:
+ f.write(await response.bytes())
diff --git a/iridescent-ivies/src/index.html b/iridescent-ivies/src/index.html
new file mode 100644
index 00000000..6a06811a
--- /dev/null
+++ b/iridescent-ivies/src/index.html
@@ -0,0 +1,310 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <title>The Social Query Language v1.0</title>