diff --git a/.gitignore b/.gitignore
index 7f442c8..2880aae 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,4 +36,6 @@ site/
 # Reference repos
 typeql-ref/
-type-bridge-core/target/
+type-bridge-core/**/target/
+*.profraw
+*.profdata
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 70036f0..2da021c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,54 +2,124 @@
 All notable changes to TypeBridge will be documented in this file.
 
+## [Unreleased]
+
+## [1.4.1] - 2026-03-03
+
+### New Features
+
+#### Lifecycle Hook System (PR #118, closes #116)
+
+Three-layer hook system for reacting to CRUD lifecycle events (audit logging, validation, cache invalidation, async notifications).
+
+##### Python ORM — `type_bridge.crud.hooks`
+- **`CrudEvent` enum** — `PRE_INSERT`, `POST_INSERT`, `PRE_UPDATE`, `POST_UPDATE`, `PRE_DELETE`, `POST_DELETE`, `PRE_PUT`, `POST_PUT`
+- **`LifecycleHook` protocol** — implement only the methods you need (`pre_insert`, `post_delete`, etc.)
+- **`HookCancelled` exception** — raise in a pre-hook to abort the operation
+- **Per-manager registration** — `manager.add_hook(hook)` / `manager.remove_hook(hook)`, chainable
+- **`should_run(event, sender)`** filtering by event type or model class
+- Pre-hooks run in registration order; post-hooks run in reverse order (middleware unwinding)
+- Zero overhead when no hooks are registered
+
+##### Rust ORM — `type-bridge-orm`
+- **`LifecycleHook` trait** with `HookContext`, `PreHookResult` (Continue/Reject), and `HookRunner`
+- Integrated into `EntityManager` and `RelationManager` via `add_hook()`
+- Same semantics as the Python layer (registration-order pre-hooks, reverse-order post-hooks)
+
+##### Rust Server — `type-bridge-server`
+- **`CrudInfo`** on `RequestContext` — operation, type_name, type_kind, attribute_names, iid
+- **`CrudInterceptor` trait** with `on_crud_request` / `on_crud_response` and `should_intercept`
+- **`CrudInterceptorAdapter`** bridges `CrudInterceptor` into the existing `Interceptor` chain
+
+#### Put (Upsert) Clause — `type-bridge-core-lib`
+- **Added `Clause::Put(Vec<Statement>)` variant** for idempotent insert (upsert) operations
+  - Parser: `parse_put_clause` with keyword lookahead in both `parse_patterns` and `parse_statements`
+  - Compiler: Generates `put\n<statements>;` TypeQL output
+  - Validation: Reuses Insert validation rules (`Clause::Insert | Clause::Put`)
+  - Full roundtrip support (parse → AST → compile → parse)
+
+### Refactoring
+
+#### Server API Simplification — `type-bridge-server`
+- **Removed standalone CRUD module** (`/entities/*`, `/relations/*` endpoints, `CrudQueryBuilder`, raw query support) — superseded by the interceptor-based design
+- **Removed `crud_builder` benchmark** and `criterion` dev-dependency
+- **Server now has 4 endpoints**: `POST /query`, `POST /query/validate`, `GET /health`, `GET /schema`
+
 ## [1.4.0] - 2026-02-20
 
+### Highlights
+
+- 5 new Rust crates: `core-lib`, `orm`, `orm-derive`, `server`, and `python` (PyO3 bindings)
+- Up to **40x faster validation** and **2.5x faster query compilation** via the Rust backend
+- Async Rust ORM with derive macros, chainable queries, and batch operations
+- Query-intercepting proxy server with REST endpoints
+- MkDocs Material documentation site
+
 ### New Features
 
-#### Rust Core Integration (PRs #95, #101–#107)
+#### Rust Core — `type-bridge-core-lib` (PRs #95, #101-#108)
 - **TypeQL schema parser** with inheritance resolution and PyO3 bindings
 - **TypeQL query parser** with bidirectional AST roundtrip
-- **Schema-aware query validation** with PyO3
bindings +- **Schema-aware query validation** with statement and pattern validators - **Rust-backed value coercion** and `format_value` - **Custom validation rules** with portable JSON DSL - **Wired Rust core into Python** compiler and validation pipeline -#### Rust ORM — `type-bridge-orm` (PR #114) -- **Async Rust ORM** with entity CRUD and mock-testable session layer -- **Derive macros** — `TypeBridgeEntity`, `TypeBridgeAttribute`, `TypeBridgeRelation` -- **Relation support** with update/put operations +#### Async Rust ORM — `type-bridge-orm` (PR #114) +- **Async ORM** with entity CRUD and mock-testable session layer +- **Derive macros** — `#[derive(TypeBridgeEntity)]`, `#[derive(TypeBridgeRelation)]`, `#[derive(TypeBridgeAttribute)]` - **Chainable query builders** with expression filtering and aggregation - **Schema management** — registration, generation, diff, and sync -- **Abstract types, inheritance, and code generator** +- **Abstract types** with inheritance and code generation - **Batch operations** — `insert_many`, `delete_many`, `update_many` - **`FieldRef`** for type-safe query field references -- **`include_schema!` proc-macro** for compile-time TQL codegen +- **`include_schema!`** proc-macro for compile-time TQL codegen - **Schema introspection** from live TypeDB database - **Group-by queries** with `GroupByResult` - **Role player field access** for relation query filtering -- **Expression helpers** — `in_range`, `startswith`, `endswith` -- **Connection pooling** with `Database::into_shared` +- **Expression helpers** — `in_range()`, `startswith()`, `endswith()` +- **Connection pooling** with `Database::into_shared()` - **Serde support** on all ORM model types -- **Structured tracing spans** on all public methods +- **Structured tracing** spans on all public methods -#### Query Intercept Proxy Server — `type-bridge-server` (PR #109) +#### Query Intercept Proxy — `type-bridge-server` (PR #109) - **REST CRUD endpoints** with schema-aware query building -- **`CrudQueryBuilder` PyO3 class** for TypeQL generation -- **Extensible library/framework architecture** -- **207 tests** with 100% MC/DC coverage and CI codecov integration +- **`CrudQueryBuilder` PyO3 class** for TypeQL generation from Python +- **Extensible library/framework** — pluggable `QueryExecutor`, `Interceptor`, `SchemaSource` +- **207 tests** with 100% MC/DC coverage -### Improvements +### Performance: Python vs Rust + +#### Validation + +| Operation | Python | Rust | Speedup | +|-----------|--------|------|---------| +| Single type name | 1.89 us | 354.75 ns | **5.3x** | +| Long name (100+ chars) | 24.90 us | 620.52 ns | **40.1x** | +| Batch 1,000 names | 5.32 ms | 266.80 us | **19.9x** | +| Batch 5,000 names | 28.02 ms | 1.33 ms | **21.1x** | + +#### Query Compilation (via serde bridge) + +| Operation | Python | Rust | Speedup | +|-----------|--------|------|---------| +| Standalone update | 93.28 us | 37.82 us | **2.5x** | +| Large batch (200 clauses) | 2.26 ms | 1.07 ms | **2.1x** | +| Heavy insert (100x6) | 1.71 ms | 896.19 us | **1.9x** | + +### Documentation & CI -#### Documentation & CI - **MkDocs + Material** documentation site with auto-generated API reference (PR #98) -- **Rust crate CI** and multi-platform wheel builds (PR #95) -- **Comprehensive benchmark suite** with TOML storage and diff support (PR #103) -- Full documentation and metadata polish for Rust core +- **Rust crate CI** with multi-platform wheel builds — Linux (x86_64, aarch64), macOS (x86_64, aarch64), Windows (x86_64) (PR #95) +- 
**Comprehensive benchmark suite** with TOML storage, comparison reports, and markdown generation (PR #103) +- **Codecov integration** for Rust coverage tracking (PR #109) ### Bug Fixes - Resolve Rust 1.93.0 clippy lint errors - Pin Python 3.13 for Rust CI jobs and fix coverage script +- Add version specifiers to inter-crate path dependencies for crates.io publishing +- Make release workflow idempotent (skip already-published packages) ## [1.3.0] - 2026-02-09 diff --git a/CLAUDE.md b/CLAUDE.md index 182226c..8cc04e8 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -84,6 +84,7 @@ type_bridge/ │ ├── base.py # Type variables (E, R) │ ├── utils.py # Shared utilities (format_value, is_multi_value_attribute) │ ├── exceptions.py # CRUD exceptions +│ ├── hooks.py # Lifecycle hooks (CrudEvent, HookCancelled, CrudHook, HookRunner) │ ├── entity/ # Entity CRUD operations │ │ ├── __init__.py # Entity module exports │ │ ├── manager.py # EntityManager class diff --git a/README.md b/README.md index 548c5ee..8c3e535 100644 --- a/README.md +++ b/README.md @@ -25,6 +25,7 @@ A modern, Pythonic ORM for [TypeDB](https://github.com/typedb/typedb) with an At - **Data Validation**: Automatic type checking and coercion via Pydantic, including keyword validation - **JSON Support**: Seamless JSON serialization/deserialization - **CRUD Operations**: Full CRUD with fetching API (get, filter, all, update) for entities and relations +- **Lifecycle Hooks**: Pre/post-operation hooks for audit logging, validation, cache invalidation, and async notifications - **Chainable Operations**: Filter, delete, and bulk update with method chaining and lambda functions - **Query Builder**: Pythonic interface for building TypeQL queries - **Multi-player Roles**: A single role can accept multiple entity types via `Role.multi(...)` diff --git a/docs/guide/crud.md b/docs/guide/crud.md index f49f8fb..f12225c 100644 --- a/docs/guide/crud.md +++ b/docs/guide/crud.md @@ -1532,6 +1532,113 @@ def test_database_operations(): mock_driver.databases.contains.assert_called_with("test_db") ``` +## Lifecycle Hooks + +Hooks let you react to CRUD events for cross-cutting concerns — audit logging, input validation, cache invalidation, auto-populating fields, and more. + +### Quick Example + +```python +from type_bridge import CrudEvent, HookCancelled + +class AuditHook: + """Log every write operation.""" + + def post_insert(self, sender, instance): + print(f"[insert] {sender.__name__} iid={instance.iid}") + + def post_update(self, sender, instance): + print(f"[update] {sender.__name__} iid={instance.iid}") + + def post_delete(self, sender, instance): + print(f"[delete] {sender.__name__} iid={instance.iid}") + + +manager = Person.manager(db) +manager.add_hook(AuditHook()) # chainable — returns self +``` + +### Events + +The `CrudEvent` enum covers all eight lifecycle points: + +| Event | When it fires | +|-------|---------------| +| `PRE_INSERT` | Before inserting an entity/relation | +| `POST_INSERT` | After a successful insert | +| `PRE_UPDATE` | Before updating | +| `POST_UPDATE` | After a successful update | +| `PRE_DELETE` | Before deleting | +| `POST_DELETE` | After a successful delete | +| `PRE_PUT` | Before an idempotent put (upsert) | +| `POST_PUT` | After a successful put | + +### Writing a Hook + +Hooks are **duck-typed** — implement only the methods you need. No base class required. 
+ +```python +class TimestampHook: + """Auto-populate created_at on insert.""" + + def pre_insert(self, sender, instance): + if hasattr(instance, "created_at") and instance.created_at is None: + instance.created_at = CreatedAt(datetime.now(timezone.utc)) +``` + +### Cancelling Operations + +Raise `HookCancelled` in any pre-hook to abort the operation: + +```python +class EmailDomainValidator: + def __init__(self, domain: str): + self.domain = domain + + def pre_insert(self, sender, instance): + if hasattr(instance, "email"): + if not instance.email.value.endswith(f"@{self.domain}"): + raise HookCancelled(f"Email must end with @{self.domain}") + + def pre_update(self, sender, instance): + self.pre_insert(sender, instance) # same logic +``` + +### Filtering with `should_run` + +Implement `should_run(event, sender)` to restrict when a hook fires: + +```python +class PersonOnlyHook: + def should_run(self, event, sender): + return sender.__name__ == "Person" + + def post_insert(self, sender, instance): + print(f"New person: {instance}") +``` + +Without `should_run`, hooks run for every event on every model. + +### Registration and Composition + +```python +manager = ( + Person.manager(db) + .add_hook(TimestampHook()) + .add_hook(EmailDomainValidator("company.com")) + .add_hook(AuditHook()) +) + +# Remove a hook later +manager.remove_hook(audit_hook) +``` + +### Execution Order + +- **Pre-hooks** run in registration order. If any raises `HookCancelled`, the operation is aborted. +- **Post-hooks** run in **reverse** registration order (middleware unwinding). Post-hook errors are logged but do not propagate. +- **Zero overhead** when no hooks are registered — the runner short-circuits on an empty hook list. + ## See Also - [Entities](entities.md) - Entity definition diff --git a/docs/guide/index.md b/docs/guide/index.md index a280db4..316dc34 100644 --- a/docs/guide/index.md +++ b/docs/guide/index.md @@ -58,6 +58,9 @@ alice = Person(name=Name("Alice"), age=Age(30)) person_manager = Person.manager(db) person_manager.insert(alice) persons = person_manager.all() + +# 5. 
Add lifecycle hooks (optional) +person_manager.add_hook(my_audit_hook) # chainable ``` ## Key Principles diff --git a/pyproject.toml b/pyproject.toml index 8b75b99..269845e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "type-bridge" -version = "1.4.0" +version = "1.4.1" description = "A modern, Pythonic ORM for TypeDB with an Attribute-based API" readme = "README.md" requires-python = ">=3.13" @@ -29,7 +29,7 @@ dependencies = [ "lark>=1.1.9", "jinja2>=3.1.0", "typer>=0.15.0", - "type-bridge-core>=1.4.0", + "type-bridge-core>=1.4.1", ] [project.urls] diff --git a/tests/unit/crud/test_hooks.py b/tests/unit/crud/test_hooks.py new file mode 100644 index 0000000..767e43d --- /dev/null +++ b/tests/unit/crud/test_hooks.py @@ -0,0 +1,594 @@ +"""Unit tests for the lifecycle hook system.""" + +from __future__ import annotations + +import logging +from typing import Any, cast + +import pytest +from typedb.driver import TransactionType + +from type_bridge import Entity, Flag, Integer, Key, String, TypeFlags +from type_bridge.crud.hooks import CrudEvent, HookCancelled, HookRunner +from type_bridge.crud.typedb_manager import TypeDBManager +from type_bridge.session import Database + +# --------------------------------------------------------------------------- +# Test helpers +# --------------------------------------------------------------------------- + + +class _RecordingTypeDBManager(TypeDBManager): + """TypeDBManager that records executed queries instead of hitting TypeDB.""" + + def __init__(self, model_class: type): + super().__init__(cast(Database, object()), model_class) + self.queries: list[str] = [] + + def _execute(self, query: str, tx_type: TransactionType) -> list[dict[str, Any]]: + self.queries.append(query) + return [] + + +class _RecordingHook: + """Hook that records all calls for testing.""" + + def __init__(self) -> None: + self.calls: list[tuple[str, type, Any]] = [] + + def pre_insert(self, sender: type, instance: Any) -> None: + self.calls.append(("pre_insert", sender, instance)) + + def post_insert(self, sender: type, instance: Any) -> None: + self.calls.append(("post_insert", sender, instance)) + + def pre_update(self, sender: type, instance: Any) -> None: + self.calls.append(("pre_update", sender, instance)) + + def post_update(self, sender: type, instance: Any) -> None: + self.calls.append(("post_update", sender, instance)) + + def pre_delete(self, sender: type, instance: Any) -> None: + self.calls.append(("pre_delete", sender, instance)) + + def post_delete(self, sender: type, instance: Any) -> None: + self.calls.append(("post_delete", sender, instance)) + + def pre_put(self, sender: type, instance: Any) -> None: + self.calls.append(("pre_put", sender, instance)) + + def post_put(self, sender: type, instance: Any) -> None: + self.calls.append(("post_put", sender, instance)) + + +class _Name(String): + pass + + +class _Age(Integer): + pass + + +class _Person(Entity): + flags = TypeFlags(name="hook_test_person") + name: _Name = Flag(Key) + age: _Age | None = None + + +# ============================================================================ +# HookRunner isolation tests +# ============================================================================ + + +class TestHookRunnerRegistration: + def test_no_hooks_initially(self): + runner = HookRunner() + assert runner.has_hooks is False + + def test_add_hook(self): + runner = HookRunner() + hook = _RecordingHook() + runner.add(hook) + assert runner.has_hooks is 
True + + def test_remove_hook(self): + runner = HookRunner() + hook = _RecordingHook() + runner.add(hook) + runner.remove(hook) + assert runner.has_hooks is False + + def test_remove_nonexistent_hook_raises(self): + runner = HookRunner() + with pytest.raises(ValueError): + runner.remove(_RecordingHook()) + + +class TestHookRunnerPreHooks: + def test_pre_hook_called(self): + runner = HookRunner() + hook = _RecordingHook() + runner.add(hook) + + alice = _Person(name=_Name("Alice")) + runner.run_pre(CrudEvent.PRE_INSERT, _Person, alice) + + assert len(hook.calls) == 1 + assert hook.calls[0] == ("pre_insert", _Person, alice) + + def test_pre_hooks_run_in_registration_order(self): + runner = HookRunner() + order: list[str] = [] + + class HookA: + def pre_insert(self, sender: type, instance: Any) -> None: + order.append("A") + + class HookB: + def pre_insert(self, sender: type, instance: Any) -> None: + order.append("B") + + runner.add(HookA()) + runner.add(HookB()) + + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("Alice"))) + assert order == ["A", "B"] + + def test_pre_hook_cancellation(self): + runner = HookRunner() + + class CancellingHook: + def pre_insert(self, sender: type, instance: Any) -> None: + raise HookCancelled("not allowed") + + runner.add(CancellingHook()) + + with pytest.raises(HookCancelled, match="not allowed") as exc_info: + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("Alice"))) + + assert exc_info.value.event == CrudEvent.PRE_INSERT + + def test_cancellation_stops_subsequent_hooks(self): + runner = HookRunner() + called = [] + + class CancellingHook: + def pre_insert(self, sender: type, instance: Any) -> None: + called.append("cancel") + raise HookCancelled("stop") + + class SecondHook: + def pre_insert(self, sender: type, instance: Any) -> None: + called.append("second") + + runner.add(CancellingHook()) + runner.add(SecondHook()) + + with pytest.raises(HookCancelled): + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("Alice"))) + + assert called == ["cancel"] + + def test_pre_hook_can_mutate_instance(self): + runner = HookRunner() + + class MutatingHook: + def pre_insert(self, sender: type, instance: Any) -> None: + instance.age = _Age(99) + + runner.add(MutatingHook()) + + alice = _Person(name=_Name("Alice")) + runner.run_pre(CrudEvent.PRE_INSERT, _Person, alice) + + assert alice.age is not None + assert alice.age.value == 99 + + +class TestHookRunnerPostHooks: + def test_post_hook_called(self): + runner = HookRunner() + hook = _RecordingHook() + runner.add(hook) + + alice = _Person(name=_Name("Alice")) + runner.run_post(CrudEvent.POST_INSERT, _Person, alice) + + assert len(hook.calls) == 1 + assert hook.calls[0] == ("post_insert", _Person, alice) + + def test_post_hooks_run_in_reverse_order(self): + runner = HookRunner() + order: list[str] = [] + + class HookA: + def post_insert(self, sender: type, instance: Any) -> None: + order.append("A") + + class HookB: + def post_insert(self, sender: type, instance: Any) -> None: + order.append("B") + + runner.add(HookA()) + runner.add(HookB()) + + runner.run_post(CrudEvent.POST_INSERT, _Person, _Person(name=_Name("Alice"))) + assert order == ["B", "A"] + + def test_post_hook_error_logged_not_propagated(self, caplog: pytest.LogCaptureFixture): + runner = HookRunner() + + class FailingHook: + def post_insert(self, sender: type, instance: Any) -> None: + raise RuntimeError("boom") + + runner.add(FailingHook()) + + with caplog.at_level(logging.ERROR, 
logger="type_bridge.crud.hooks"): + runner.run_post(CrudEvent.POST_INSERT, _Person, _Person(name=_Name("Alice"))) + + assert "boom" in caplog.text + + def test_post_hook_error_does_not_prevent_other_hooks(self, caplog: pytest.LogCaptureFixture): + runner = HookRunner() + called: list[str] = [] + + class FailingHook: + def post_insert(self, sender: type, instance: Any) -> None: + called.append("failing") + raise RuntimeError("boom") + + class GoodHook: + def post_insert(self, sender: type, instance: Any) -> None: + called.append("good") + + # Reverse order: GoodHook added first runs second, FailingHook added second runs first + runner.add(GoodHook()) + runner.add(FailingHook()) + + with caplog.at_level(logging.ERROR, logger="type_bridge.crud.hooks"): + runner.run_post(CrudEvent.POST_INSERT, _Person, _Person(name=_Name("Alice"))) + + # FailingHook runs first (reverse order), GoodHook second + assert called == ["failing", "good"] + + +class TestHookRunnerShouldRun: + def test_should_run_false_skips_hook(self): + runner = HookRunner() + called = False + + class FilteredHook: + def should_run(self, event: CrudEvent, sender: type) -> bool: + return False + + def pre_insert(self, sender: type, instance: Any) -> None: + nonlocal called + called = True + + runner.add(FilteredHook()) + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("Alice"))) + assert called is False + + def test_should_run_filters_by_event(self): + runner = HookRunner() + hook = _RecordingHook() + + class InsertOnlyHook(_RecordingHook): + def should_run(self, event: CrudEvent, sender: type) -> bool: + return event in (CrudEvent.PRE_INSERT, CrudEvent.POST_INSERT) + + filtered = InsertOnlyHook() + runner.add(filtered) + + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A"))) + runner.run_pre(CrudEvent.PRE_UPDATE, _Person, _Person(name=_Name("B"))) + + assert len(filtered.calls) == 1 + assert filtered.calls[0][0] == "pre_insert" + + def test_should_run_filters_by_sender(self): + runner = HookRunner() + + class AnotherName(String): + pass + + class Cat(Entity): + flags = TypeFlags(name="hook_test_cat") + name: AnotherName = Flag(Key) + + class PersonOnlyHook(_RecordingHook): + def should_run(self, event: CrudEvent, sender: type) -> bool: + return sender is _Person + + filtered = PersonOnlyHook() + runner.add(filtered) + + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A"))) + runner.run_pre(CrudEvent.PRE_INSERT, Cat, Cat(name=AnotherName("Whiskers"))) + + assert len(filtered.calls) == 1 + + def test_hook_without_should_run_always_runs(self): + runner = HookRunner() + hook = _RecordingHook() + runner.add(hook) + + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A"))) + runner.run_pre(CrudEvent.PRE_UPDATE, _Person, _Person(name=_Name("B"))) + runner.run_pre(CrudEvent.PRE_DELETE, _Person, _Person(name=_Name("C"))) + + assert len(hook.calls) == 3 + + +class TestHookRunnerEdgeCases: + def test_empty_hook_class_is_harmless(self): + runner = HookRunner() + runner.add(object()) # no hook methods at all + + # Should not raise + runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A"))) + runner.run_post(CrudEvent.POST_INSERT, _Person, _Person(name=_Name("A"))) + + def test_partial_hook_only_post_insert(self): + runner = HookRunner() + called = False + + class PostOnlyHook: + def post_insert(self, sender: type, instance: Any) -> None: + nonlocal called + called = True + + runner.add(PostOnlyHook()) + + # pre_insert should not call post_insert + 
runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A"))) + assert called is False + + # post_insert should + runner.run_post(CrudEvent.POST_INSERT, _Person, _Person(name=_Name("A"))) + assert called is True + + +# ============================================================================ +# TypeDBManager hook integration tests +# ============================================================================ + + +class TestManagerHookRegistration: + def test_add_hook_returns_self(self): + mgr = _RecordingTypeDBManager(_Person) + result = mgr.add_hook(_RecordingHook()) + assert result is mgr + + def test_add_hook_chaining(self): + mgr = _RecordingTypeDBManager(_Person) + hook1 = _RecordingHook() + hook2 = _RecordingHook() + result = mgr.add_hook(hook1).add_hook(hook2) + assert result is mgr + + def test_remove_hook(self): + mgr = _RecordingTypeDBManager(_Person) + hook = _RecordingHook() + mgr.add_hook(hook) + mgr.remove_hook(hook) + # No hooks, insert should not trigger any hook calls + alice = _Person(name=_Name("Alice")) + mgr.insert(alice) + assert hook.calls == [] + + +class TestManagerInsertHooks: + def test_insert_fires_pre_and_post(self): + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + mgr.insert(alice) + + assert len(hook.calls) == 2 + assert hook.calls[0] == ("pre_insert", _Person, alice) + assert hook.calls[1] == ("post_insert", _Person, alice) + + def test_insert_pre_hook_cancellation_prevents_query(self): + class CancelHook: + def pre_insert(self, sender: type, instance: Any) -> None: + raise HookCancelled("nope") + + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(CancelHook()) + + with pytest.raises(HookCancelled, match="nope"): + mgr.insert(_Person(name=_Name("Alice"))) + + # No queries should have been executed + assert mgr.queries == [] + + +class TestManagerUpdateHooks: + def test_update_fires_pre_and_post(self): + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + mgr.update(alice) + + assert len(hook.calls) == 2 + assert hook.calls[0] == ("pre_update", _Person, alice) + assert hook.calls[1] == ("post_update", _Person, alice) + + +class TestManagerDeleteHooks: + def test_delete_fires_pre_and_post(self): + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + mgr.delete(alice) + + assert len(hook.calls) == 2 + assert hook.calls[0] == ("pre_delete", _Person, alice) + assert hook.calls[1] == ("post_delete", _Person, alice) + + +class TestManagerPutHooks: + def test_put_fires_pre_and_post(self): + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + mgr.put(alice) + + assert len(hook.calls) == 2 + assert hook.calls[0] == ("pre_put", _Person, alice) + assert hook.calls[1] == ("post_put", _Person, alice) + + +class TestManagerBatchHooks: + def test_insert_many_fires_hooks_per_instance(self): + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + bob = _Person(name=_Name("Bob")) + mgr.insert_many([alice, bob]) + + # Pre-hooks in order, then post-hooks in order + pre_calls = [c for c in hook.calls if c[0] == "pre_insert"] + post_calls = [c for c in hook.calls if c[0] == "post_insert"] + + assert len(pre_calls) == 2 + assert pre_calls[0][2] is alice + assert pre_calls[1][2] is 
bob + + assert len(post_calls) == 2 + assert post_calls[0][2] is alice + assert post_calls[1][2] is bob + + def test_insert_many_cancellation_prevents_all(self): + call_count = 0 + + class CancelOnSecond: + def pre_insert(self, sender: type, instance: Any) -> None: + nonlocal call_count + call_count += 1 + if call_count == 2: + raise HookCancelled("stop at second") + + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(CancelOnSecond()) + + with pytest.raises(HookCancelled, match="stop at second"): + mgr.insert_many([_Person(name=_Name("A")), _Person(name=_Name("B"))]) + + # No queries should have been executed (pre-hooks run before any DB op) + assert mgr.queries == [] + + def test_update_many_fires_hooks_per_instance(self): + """update_many delegates to update(), which fires hooks individually.""" + hook = _RecordingHook() + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(hook) + + alice = _Person(name=_Name("Alice")) + bob = _Person(name=_Name("Bob")) + mgr.update_many([alice, bob]) + + pre_calls = [c for c in hook.calls if c[0] == "pre_update"] + post_calls = [c for c in hook.calls if c[0] == "post_update"] + assert len(pre_calls) == 2 + assert len(post_calls) == 2 + + +class TestManagerPreHookMutation: + def test_pre_insert_mutation_before_query(self): + """Pre-hook mutation should affect the instance before query generation.""" + + class SetAgeHook: + def pre_insert(self, sender: type, instance: Any) -> None: + instance.age = _Age(42) + + mgr = _RecordingTypeDBManager(_Person) + mgr.add_hook(SetAgeHook()) + + alice = _Person(name=_Name("Alice")) + assert alice.age is None + + mgr.insert(alice) + assert alice.age is not None + assert alice.age.value == 42 + + +class TestZeroOverhead: + def test_no_hooks_has_hooks_false(self): + mgr = _RecordingTypeDBManager(_Person) + assert mgr._hook_runner.has_hooks is False + + def test_insert_works_without_hooks(self): + """Insert should work normally when no hooks are registered.""" + mgr = _RecordingTypeDBManager(_Person) + alice = _Person(name=_Name("Alice")) + result = mgr.insert(alice) + assert result is alice + assert len(mgr.queries) > 0 + + +# ============================================================================ +# CrudEvent and HookCancelled tests +# ============================================================================ + + +class TestCrudEvent: + def test_all_events_have_correct_values(self): + assert CrudEvent.PRE_INSERT.value == "pre_insert" + assert CrudEvent.POST_INSERT.value == "post_insert" + assert CrudEvent.PRE_UPDATE.value == "pre_update" + assert CrudEvent.POST_UPDATE.value == "post_update" + assert CrudEvent.PRE_DELETE.value == "pre_delete" + assert CrudEvent.POST_DELETE.value == "post_delete" + assert CrudEvent.PRE_PUT.value == "pre_put" + assert CrudEvent.POST_PUT.value == "post_put" + + def test_eight_events(self): + assert len(CrudEvent) == 8 + + +class TestHookCancelled: + def test_reason(self): + exc = HookCancelled("bad data") + assert exc.reason == "bad data" + assert str(exc) == "bad data" + + def test_context_attributes(self): + hook = object() + exc = HookCancelled("stop", event=CrudEvent.PRE_INSERT, hook=hook) + assert exc.event == CrudEvent.PRE_INSERT + assert exc.hook is hook + + def test_defaults(self): + exc = HookCancelled() + assert exc.reason == "" + assert exc.event is None + assert exc.hook is None + + def test_enriched_by_runner(self): + runner = HookRunner() + + class MyHook: + def pre_insert(self, sender: type, instance: Any) -> None: + raise HookCancelled("no") + + my_hook = 
MyHook()
+        runner.add(my_hook)
+
+        with pytest.raises(HookCancelled) as exc_info:
+            runner.run_pre(CrudEvent.PRE_INSERT, _Person, _Person(name=_Name("A")))
+
+        assert exc_info.value.event == CrudEvent.PRE_INSERT
+        assert exc_info.value.hook is my_hook
diff --git a/type-bridge-core/Cargo.lock b/type-bridge-core/Cargo.lock
index 5262e6d..a04ac24 100644
--- a/type-bridge-core/Cargo.lock
+++ b/type-bridge-core/Cargo.lock
@@ -2117,7 +2117,7 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
 
 [[package]]
 name = "type-bridge-core"
-version = "1.4.0"
+version = "1.4.1"
 dependencies = [
  "pyo3",
  "pythonize",
@@ -2128,7 +2128,7 @@ dependencies = [
 
 [[package]]
 name = "type-bridge-core-lib"
-version = "1.4.0"
+version = "1.4.1"
 dependencies = [
  "criterion",
  "once_cell",
@@ -2143,7 +2143,7 @@ dependencies = [
 
 [[package]]
 name = "type-bridge-orm"
-version = "1.4.0"
+version = "1.4.1"
 dependencies = [
  "criterion",
  "futures",
@@ -2159,7 +2159,7 @@ dependencies = [
 
 [[package]]
 name = "type-bridge-orm-derive"
-version = "1.4.0"
+version = "1.4.1"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2169,12 +2169,11 @@ dependencies = [
 
 [[package]]
 name = "type-bridge-server"
-version = "1.4.0"
+version = "1.4.1"
 dependencies = [
  "axum 0.8.8",
  "chrono",
  "clap",
- "criterion",
  "futures",
  "http-body-util",
  "serde",
@@ -2188,6 +2187,7 @@ dependencies = [
  "tracing",
  "tracing-subscriber",
  "type-bridge-core-lib",
+ "type-bridge-server",
  "typedb-driver",
  "uuid",
 ]
diff --git a/type-bridge-core/crates/core/Cargo.toml b/type-bridge-core/crates/core/Cargo.toml
index e302dbd..03ba2f5 100644
--- a/type-bridge-core/crates/core/Cargo.toml
+++ b/type-bridge-core/crates/core/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "type-bridge-core-lib"
-version = "1.4.0"
+version = "1.4.1"
 edition = "2024"
 description = "TypeQL AST, schema parser, query compiler, and validation engine for type-bridge"
 license.workspace = true
diff --git a/type-bridge-core/crates/core/src/ast.rs b/type-bridge-core/crates/core/src/ast.rs
index c20b8b1..3158f14 100644
--- a/type-bridge-core/crates/core/src/ast.rs
+++ b/type-bridge-core/crates/core/src/ast.rs
@@ -235,6 +235,11 @@ pub enum Clause {
     MatchLet(Vec<Statement>),
     /// An insert clause containing statements that create new data.
     Insert(Vec<Statement>),
+    /// A put clause containing statements for idempotent insert (upsert).
+    ///
+    /// Semantics: insert if not exists, otherwise return the existing match.
+    /// Uses the same statement syntax as `Insert`.
+    Put(Vec<Statement>),
     /// A delete clause containing statements that remove existing data.
     Delete(Vec<Statement>),
     /// An update clause containing statements that modify existing data.
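Note: `Put` deliberately mirrors `Insert` at the AST level, so code that builds insert statements can be reused verbatim for upserts. A minimal sketch, assuming the `ast` module is public at the crate root (the `Statement` field names are taken from the tests later in this diff):

```rust
use type_bridge_core_lib::ast::{Clause, Statement};

// An upsert clause: compiles to "put\n$p isa person;" and inserts the
// entity only if no existing match is found.
fn put_person() -> Clause {
    Clause::Put(vec![Statement::Isa {
        variable: "$p".into(),
        type_name: "person".into(),
    }])
}

// Swapping the constructor is the only difference from a plain insert.
fn insert_person() -> Clause {
    Clause::Insert(vec![Statement::Isa {
        variable: "$p".into(),
        type_name: "person".into(),
    }])
}
```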
diff --git a/type-bridge-core/crates/core/src/compiler.rs b/type-bridge-core/crates/core/src/compiler.rs
index 1d0b35f..80b8ee5 100644
--- a/type-bridge-core/crates/core/src/compiler.rs
+++ b/type-bridge-core/crates/core/src/compiler.rs
@@ -43,6 +43,13 @@ impl QueryCompiler {
                     .join(";\n");
                 format!("insert\n{};", s_str)
             }
+            Clause::Put(statements) => {
+                let s_str = statements.iter()
+                    .map(|s| self.compile_statement(s))
+                    .collect::<Vec<_>>()
+                    .join(";\n");
+                format!("put\n{};", s_str)
+            }
             Clause::Delete(statements) => {
                 let s_str = statements.iter()
                     .map(|s| self.compile_statement(s))
@@ -660,6 +667,16 @@ mod tests {
         assert_eq!(c.compile_clause(&clause), "insert\n$p has name \"Alice\";");
     }
 
+    #[test]
+    fn test_clause_put() {
+        let c = compiler();
+        let clause = Clause::Put(vec![
+            Statement::Isa { variable: "$p".into(), type_name: "person".into() },
+            Statement::Has { subject_var: "$p".into(), attr_name: "name".into(), value: lit(json!("Alice"), "string") },
+        ]);
+        assert_eq!(c.compile_clause(&clause), "put\n$p isa person;\n$p has name \"Alice\";");
+    }
+
     #[test]
     fn test_clause_delete() {
         let c = compiler();
@@ -856,6 +873,18 @@ mod tests {
         assert!(result.contains("insert\n"));
     }
 
+    #[test]
+    fn test_multi_clause_match_put() {
+        let c = compiler();
+        let clauses = vec![
+            Clause::Match(vec![Pattern::Entity { variable: "$p".into(), type_name: "person".into(), constraints: vec![], is_strict: false }]),
+            Clause::Put(vec![Statement::Has { subject_var: "$p".into(), attr_name: "age".into(), value: lit(json!(30), "long") }]),
+        ];
+        let result = c.compile(&clauses);
+        assert!(result.starts_with("match\n"));
+        assert!(result.contains("put\n"));
+    }
+
     #[test]
     fn test_multi_clause_match_delete() {
         let c = compiler();
@@ -968,6 +997,16 @@ mod tests {
         roundtrip("insert\n$r isa employment, links (employee: $p, employer: $c);");
     }
 
+    #[test]
+    fn test_roundtrip_put() {
+        roundtrip("put\n$p isa person;\n$p has name \"Alice\";");
+    }
+
+    #[test]
+    fn test_roundtrip_match_put() {
+        roundtrip("match $p isa person, has name \"Alice\";\nput\n$p has age 30;");
+    }
+
     #[test]
     fn test_roundtrip_match_delete() {
         roundtrip("match $p isa person, has name \"Alice\";\ndelete $p;");
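Note: the new `put` support composes with the existing parse/compile roundtrip. A minimal sketch, assuming `parse_typeql_query`, `Clause`, and `QueryCompiler` are reachable from the crate root and that `QueryCompiler` has a `new()` constructor (the tests above use a private `compiler()` helper):

```rust
use type_bridge_core_lib::{parse_typeql_query, Clause, QueryCompiler};

fn main() {
    // Parse a match+put query into the AST: Clause::Match followed by Clause::Put.
    let query = "match\n$p isa person;\nput\n$p has age 30;";
    let clauses = parse_typeql_query(query).unwrap();
    assert!(matches!(&clauses[0], Clause::Match(_)));
    assert!(matches!(&clauses[1], Clause::Put(_)));

    // Compile back to TypeQL; the put clause is emitted as "put\n<statements>;".
    let compiled = QueryCompiler::new().compile(&clauses);
    assert!(compiled.contains("put\n"));
}
```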
diff --git a/type-bridge-core/crates/core/src/query_parser.rs b/type-bridge-core/crates/core/src/query_parser.rs
index c6ee19f..a927b71 100644
--- a/type-bridge-core/crates/core/src/query_parser.rs
+++ b/type-bridge-core/crates/core/src/query_parser.rs
@@ -590,6 +590,7 @@ fn parse_patterns(input: &mut &str) -> PResult<Vec<Pattern>> {
     ws_comments(input);
     if input.is_empty()
         || input.starts_with("insert")
+        || input.starts_with("put")
         || input.starts_with("delete")
         || input.starts_with("update")
         || input.starts_with("fetch")
@@ -897,6 +898,7 @@ fn parse_statements(input: &mut &str, ctx: StmtContext) -> PResult<Vec<Statement>> {
         || input.starts_with("insert")
+        || input.starts_with("put")
         || input.starts_with("delete")
@@ -?,? +?,? @@ fn parse_clause(input: &mut &str) -> PResult<Clause> {
     alt((
         parse_match_clause,
         parse_insert_clause,
+        parse_put_clause,
         parse_delete_clause,
         parse_update_clause,
         parse_fetch_clause,
@@ -1259,6 +1262,14 @@ fn parse_insert_clause(input: &mut &str) -> PResult<Clause> {
     Ok(Clause::Insert(stmts))
 }
 
+/// Parse a put clause: `put\n<statements>;`.
+fn parse_put_clause(input: &mut &str) -> PResult<Clause> {
+    literal("put").parse_next(input)?;
+    ws_comments(input);
+    let stmts = parse_statements(input, StmtContext::Insert)?;
+    Ok(Clause::Put(stmts))
+}
+
 /// Parse a delete clause: `delete\n<statements>;`.
 fn parse_delete_clause(input: &mut &str) -> PResult<Clause> {
     literal("delete").parse_next(input)?;
@@ -2129,6 +2140,28 @@ mod tests {
         }
     }
 
+    #[test]
+    fn test_parse_put_clause() {
+        let input = "put\n$p isa person;\n$p has name \"Alice\";";
+        let clauses = parse_typeql_query(input).unwrap();
+        assert_eq!(clauses.len(), 1);
+        match &clauses[0] {
+            Clause::Put(stmts) => {
+                assert_eq!(stmts.len(), 2);
+            }
+            _ => panic!("expected Put"),
+        }
+    }
+
+    #[test]
+    fn test_parse_match_put() {
+        let input = "match\n$p isa person;\nput\n$p has age 30;";
+        let clauses = parse_typeql_query(input).unwrap();
+        assert_eq!(clauses.len(), 2);
+        assert!(matches!(&clauses[0], Clause::Match(_)));
+        assert!(matches!(&clauses[1], Clause::Put(_)));
+    }
+
     #[test]
     fn test_parse_delete_clause() {
         let input = "delete\n$p;";
diff --git a/type-bridge-core/crates/core/src/validation.rs b/type-bridge-core/crates/core/src/validation.rs
index b885c11..ccfa5a9 100644
--- a/type-bridge-core/crates/core/src/validation.rs
+++ b/type-bridge-core/crates/core/src/validation.rs
@@ -654,7 +654,7 @@ impl ValidationEngine {
                     );
                 }
             }
-            Clause::Insert(stmts) => {
+            Clause::Insert(stmts) | Clause::Put(stmts) => {
                 self.validate_insert_stmts(stmts, schema, env, path, errors);
             }
             Clause::Delete(stmts) | Clause::Update(stmts) => {
@@ -2161,6 +2161,44 @@ mod schema_validation_tests {
         assert!(!result.is_valid);
         assert!(result.errors.iter().any(|e| e.code == "UNKNOWN_ATTRIBUTE_OWNERSHIP"));
     }
+
+    // -- Put clause validation (same semantics as Insert) --------------------
+
+    #[test]
+    fn test_put_valid() {
+        let engine = ValidationEngine::new();
+        let schema = build_test_schema();
+        let clauses = vec![Clause::Put(vec![
+            Statement::Isa { variable: "$p".into(), type_name: "person".into() },
+            Statement::Has { subject_var: "$p".into(), attr_name: "name".into(), value: Value::Literal(LiteralValue { value: json!("Alice"), value_type: "string".into() }) },
+        ])];
+        let result = engine.validate_query(&clauses, &schema);
+        assert!(result.is_valid);
+    }
+
+    #[test]
+    fn test_put_unknown_type() {
+        let engine = ValidationEngine::new();
+        let schema = build_test_schema();
+        let clauses = vec![Clause::Put(vec![
+            Statement::Isa { variable: "$x".into(), type_name: "spaceship".into() },
+        ])];
+        let result = engine.validate_query(&clauses, &schema);
+        assert!(!result.is_valid);
+        assert!(result.errors.iter().any(|e| e.code == "UNKNOWN_TYPE"));
+    }
+
+    #[test]
+    fn test_put_abstract_type() {
+        let engine = ValidationEngine::new();
+        let schema = build_test_schema();
+        let clauses = vec![Clause::Put(vec![
+            Statement::Isa { variable: "$a".into(), type_name: "animal".into() },
+        ])];
+        let result = engine.validate_query(&clauses, &schema);
+        assert!(!result.is_valid);
+        assert!(result.errors.iter().any(|e| e.code == "ABSTRACT_TYPE_INSTANTIATION"));
+    }
 }
 
 #[cfg(test)]
@@ -2398,4 +2436,5 @@ mod rule_tests {
     assert!(engine.validate_type_name("person", "entity").is_valid);
     assert!(!engine.validate_type_name("define", "entity").is_valid);
 }
+
 }
diff --git a/type-bridge-core/crates/orm-derive/Cargo.toml b/type-bridge-core/crates/orm-derive/Cargo.toml
index 1a071d4..02fe7e2 100644
--- a/type-bridge-core/crates/orm-derive/Cargo.toml
+++ b/type-bridge-core/crates/orm-derive/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "type-bridge-orm-derive"
-version = "1.4.0"
+version = "1.4.1"
 edition = "2024"
 description = "Derive macros for type-bridge-orm: TypeBridgeEntity, TypeBridgeAttribute, TypeBridgeRelation"
 license.workspace = true
@@ -14,7 +14,7 @@ proc-macro = true
 proc-macro2 = "1"
 quote = "1"
 syn = { version = "2", features = ["full", "extra-traits"] }
-type-bridge-core-lib = { path = "../core", version = "1.4.0" }
+type-bridge-core-lib = { path = "../core", version = "1.4.1" }
 
 [lints]
 workspace = true
diff --git a/type-bridge-core/crates/orm/Cargo.toml b/type-bridge-core/crates/orm/Cargo.toml
index e4e3895..ff96e0e 100644
--- a/type-bridge-core/crates/orm/Cargo.toml
+++ b/type-bridge-core/crates/orm/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "type-bridge-orm"
-version = "1.4.0"
+version = "1.4.1"
 edition = "2024"
 description = "Async ORM for TypeDB built on type-bridge-core-lib"
 license.workspace = true
@@ -21,13 +21,13 @@ typedb = ["dep:typedb-driver", "dep:futures"]
 derive = ["dep:type-bridge-orm-derive"]
 
 [dependencies]
-type-bridge-core-lib = { path = "../core", version = "1.4.0" }
+type-bridge-core-lib = { path = "../core", version = "1.4.1" }
 tokio = { version = "1", features = ["full"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 thiserror = "2"
 tracing = "0.1"
-type-bridge-orm-derive = { path = "../orm-derive", version = "1.4.0", optional = true }
+type-bridge-orm-derive = { path = "../orm-derive", version = "1.4.1", optional = true }
 
 # Optional: real TypeDB backend
 typedb-driver = { version = "3", optional = true }
diff --git a/type-bridge-core/crates/orm/src/error.rs b/type-bridge-core/crates/orm/src/error.rs
index 3c2b73d..a0a5034 100644
--- a/type-bridge-core/crates/orm/src/error.rs
+++ b/type-bridge-core/crates/orm/src/error.rs
@@ -45,6 +45,10 @@ pub enum OrmError {
     /// Serde JSON error.
     #[error("Serialization error: {0}")]
     Serialization(#[from] serde_json::Error),
+
+    /// A lifecycle hook rejected or failed the operation.
+    #[error("Hook error: {0}")]
+    Hook(#[from] crate::hooks::HookError),
 }
 
 /// Convenience Result alias for ORM operations.
diff --git a/type-bridge-core/crates/orm/src/hooks/context.rs b/type-bridge-core/crates/orm/src/hooks/context.rs
new file mode 100644
index 0000000..4282b22
--- /dev/null
+++ b/type-bridge-core/crates/orm/src/hooks/context.rs
@@ -0,0 +1,43 @@
+//! Hook context types passed to lifecycle hooks.
+
+use std::collections::HashMap;
+use std::time::SystemTime;
+
+use crate::value::AttributeValue;
+
+/// The CRUD operation being performed.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum CrudOperation {
+    Insert,
+    Update,
+    Delete,
+    Put,
+}
+
+/// Whether the target is an entity or relation.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TypeKind {
+    Entity,
+    Relation,
+}
+
+/// Context passed to lifecycle hooks before and after CRUD operations.
+///
+/// Pre-hooks receive `&mut HookContext` and may inspect attributes or
+/// set metadata. Post-hooks receive `&HookContext` (read-only).
+pub struct HookContext {
+    /// The TypeDB type name (e.g. `"person"`, `"employment"`).
+    pub type_name: &'static str,
+    /// Whether this is an entity or relation.
+    pub type_kind: TypeKind,
+    /// Which CRUD operation is being performed.
+    pub operation: CrudOperation,
+    /// Attribute name–value pairs of the instance.
+    pub attributes: Vec<(&'static str, AttributeValue)>,
+    /// IID if available (set after insert, or present for update/delete).
+    pub iid: Option<String>,
+    /// Arbitrary user metadata for passing data between hooks.
+    pub metadata: HashMap<String, serde_json::Value>,
+    /// When the operation started.
+    pub timestamp: SystemTime,
+}
diff --git a/type-bridge-core/crates/orm/src/hooks/error.rs b/type-bridge-core/crates/orm/src/hooks/error.rs
new file mode 100644
index 0000000..f9d398a
--- /dev/null
+++ b/type-bridge-core/crates/orm/src/hooks/error.rs
@@ -0,0 +1,25 @@
+//! Error types for lifecycle hooks.
+
+use thiserror::Error;
+
+use super::CrudOperation;
+
+/// Errors that can occur in lifecycle hooks.
+#[derive(Debug, Error)]
+pub enum HookError {
+    /// A pre-hook rejected the operation.
+    #[error("Hook '{hook_name}' rejected {operation:?}: {reason}")]
+    Rejected {
+        hook_name: String,
+        operation: CrudOperation,
+        reason: String,
+    },
+
+    /// A hook encountered an internal error.
+    #[error("Hook '{hook_name}' failed: {source}")]
+    Internal {
+        hook_name: String,
+        #[source]
+        source: Box<dyn std::error::Error + Send + Sync>,
+    },
+}
diff --git a/type-bridge-core/crates/orm/src/hooks/mod.rs b/type-bridge-core/crates/orm/src/hooks/mod.rs
new file mode 100644
index 0000000..f65fbdb
--- /dev/null
+++ b/type-bridge-core/crates/orm/src/hooks/mod.rs
@@ -0,0 +1,16 @@
+//! Lifecycle hook system for CRUD operations.
+//!
+//! Hooks are registered on manager instances via
+//! [`add_hook`](crate::manager::EntityManager::add_hook).
+//! Pre-hooks run in registration order and may reject operations.
+//! Post-hooks run in reverse order; errors are logged but not propagated.
+
+mod context;
+mod error;
+mod runner;
+mod traits;
+
+pub use context::{CrudOperation, HookContext, TypeKind};
+pub use error::HookError;
+pub use runner::HookRunner;
+pub use traits::{LifecycleHook, PreHookResult};
diff --git a/type-bridge-core/crates/orm/src/hooks/runner.rs b/type-bridge-core/crates/orm/src/hooks/runner.rs
new file mode 100644
index 0000000..3420f35
--- /dev/null
+++ b/type-bridge-core/crates/orm/src/hooks/runner.rs
@@ -0,0 +1,108 @@
+//! [`HookRunner`] — shared hook execution engine used by both managers.
+
+use std::collections::HashMap;
+use std::sync::Arc;
+use std::time::SystemTime;
+
+use crate::error::{OrmError, Result};
+use crate::value::AttributeValue;
+
+use super::context::{CrudOperation, HookContext, TypeKind};
+use super::error::HookError;
+use super::traits::{LifecycleHook, PreHookResult};
+
+/// Shared hook execution engine used by both
+/// [`EntityManager`](crate::manager::EntityManager) and
+/// [`RelationManager`](crate::manager::RelationManager).
+///
+/// When no hooks are registered, all methods are effectively no-ops.
+#[derive(Default, Clone)]
+pub struct HookRunner {
+    hooks: Vec<Arc<dyn LifecycleHook>>,
+}
+
+impl HookRunner {
+    /// Create a new empty runner.
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    /// Register a hook.
+    pub fn add_hook(&mut self, hook: Arc<dyn LifecycleHook>) {
+        self.hooks.push(hook);
+    }
+
+    /// Whether any hooks are registered.
+    pub fn has_hooks(&self) -> bool {
+        !self.hooks.is_empty()
+    }
+
+    /// Build a [`HookContext`] for the given operation.
+    pub fn build_context(
+        type_name: &'static str,
+        type_kind: TypeKind,
+        operation: CrudOperation,
+        attributes: Vec<(&'static str, AttributeValue)>,
+        iid: Option<String>,
+    ) -> HookContext {
+        HookContext {
+            type_name,
+            type_kind,
+            operation,
+            attributes,
+            iid,
+            metadata: HashMap::new(),
+            timestamp: SystemTime::now(),
+        }
+    }
+
+    /// Run pre-hooks in registration order.
+    ///
+    /// Short-circuits on [`PreHookResult::Reject`], converting it to
+    /// [`OrmError::Hook`].
+    pub async fn run_pre_hooks(&self, ctx: &mut HookContext) -> Result<()> {
+        for hook in &self.hooks {
+            if !hook.should_run(ctx) {
+                continue;
+            }
+            match hook.before_operation(ctx).await {
+                Ok(PreHookResult::Continue) => {}
+                Ok(PreHookResult::Reject { reason }) => {
+                    return Err(OrmError::Hook(HookError::Rejected {
+                        hook_name: hook.name().to_string(),
+                        operation: ctx.operation,
+                        reason,
+                    }));
+                }
+                Err(e) => return Err(OrmError::Hook(e)),
+            }
+        }
+        Ok(())
+    }
+
+    /// Run post-hooks in reverse registration order.
+    ///
+    /// Errors are logged but do **not** propagate.
+    pub async fn run_post_hooks(&self, ctx: &HookContext) {
+        for hook in self.hooks.iter().rev() {
+            if !hook.should_run(ctx) {
+                continue;
+            }
+            if let Err(e) = hook.after_operation(ctx).await {
+                tracing::warn!(
+                    hook = hook.name(),
+                    error = %e,
+                    "Post-hook error (non-fatal)"
+                );
+            }
+        }
+    }
+}
+
+impl std::fmt::Debug for HookRunner {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("HookRunner")
+            .field("hook_count", &self.hooks.len())
+            .finish()
+    }
+}
diff --git a/type-bridge-core/crates/orm/src/hooks/traits.rs b/type-bridge-core/crates/orm/src/hooks/traits.rs
new file mode 100644
index 0000000..c3cc1fb
--- /dev/null
+++ b/type-bridge-core/crates/orm/src/hooks/traits.rs
@@ -0,0 +1,76 @@
+//! The [`LifecycleHook`] trait and [`PreHookResult`] enum.
+
+use crate::session::backend::BoxFuture;
+
+use super::context::HookContext;
+use super::error::HookError;
+
+/// Result of a pre-operation hook.
+#[derive(Debug)]
+pub enum PreHookResult {
+    /// Continue with the operation.
+    Continue,
+    /// Reject the operation with a reason.
+    Reject { reason: String },
+}
+
+/// Trait for lifecycle hooks that run before and after CRUD operations.
+///
+/// All hooks must be `Send + Sync` for use across async tasks.
+/// Methods use [`BoxFuture`] for object-safe async.
+///
+/// # Example
+///
+/// ```ignore
+/// struct AuditLogger;
+///
+/// impl LifecycleHook for AuditLogger {
+///     fn name(&self) -> &str { "audit-logger" }
+///
+///     fn before_operation<'a>(
+///         &'a self,
+///         ctx: &'a mut HookContext,
+///     ) -> BoxFuture<'a, Result<PreHookResult, HookError>> {
+///         Box::pin(async move {
+///             tracing::info!(type_name = ctx.type_name, "Before operation");
+///             Ok(PreHookResult::Continue)
+///         })
+///     }
+///
+///     fn after_operation<'a>(
+///         &'a self,
+///         ctx: &'a HookContext,
+///     ) -> BoxFuture<'a, Result<(), HookError>> {
+///         Box::pin(async move {
+///             tracing::info!(type_name = ctx.type_name, "After operation");
+///             Ok(())
+///         })
+///     }
+/// }
+/// ```
+pub trait LifecycleHook: Send + Sync {
+    /// Human-readable name for this hook (used in logs and errors).
+    fn name(&self) -> &str;
+
+    /// Called before a CRUD operation.
+    ///
+    /// Can inspect/modify the context or reject the operation entirely.
+    fn before_operation<'a>(
+        &'a self,
+        ctx: &'a mut HookContext,
+    ) -> BoxFuture<'a, Result<PreHookResult, HookError>>;
+
+    /// Called after a successful CRUD operation.
+    fn after_operation<'a>(
+        &'a self,
+        ctx: &'a HookContext,
+    ) -> BoxFuture<'a, Result<(), HookError>>;
+
+    /// Return `false` to skip this hook for the given context.
+    ///
+    /// Defaults to always running.
+    fn should_run(&self, ctx: &HookContext) -> bool {
+        let _ = ctx;
+        true
+    }
+}
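Note: wiring a custom hook end-to-end. A minimal sketch of a pre-hook that rejects deletes; the `BoxFuture` path and the crate-root re-export of `EntityManager` are assumptions, and `Person`/`db` are hypothetical:

```rust
use std::sync::Arc;

use type_bridge_orm::{CrudOperation, HookContext, HookError, LifecycleHook, PreHookResult};
// Assumed public path, matching the import in traits.rs above.
use type_bridge_orm::session::backend::BoxFuture;

/// A pre-hook that blocks all deletes and lets everything else through.
struct NoDeleteHook;

impl LifecycleHook for NoDeleteHook {
    fn name(&self) -> &str {
        "no-delete"
    }

    fn before_operation<'a>(
        &'a self,
        ctx: &'a mut HookContext,
    ) -> BoxFuture<'a, Result<PreHookResult, HookError>> {
        Box::pin(async move {
            if ctx.operation == CrudOperation::Delete {
                // Surfaces to the caller as OrmError::Hook(HookError::Rejected { .. }).
                return Ok(PreHookResult::Reject {
                    reason: format!("deletes of '{}' are disabled", ctx.type_name),
                });
            }
            Ok(PreHookResult::Continue)
        })
    }

    fn after_operation<'a>(
        &'a self,
        _ctx: &'a HookContext,
    ) -> BoxFuture<'a, Result<(), HookError>> {
        Box::pin(async { Ok(()) })
    }
}

// Registration against a manager (hypothetical `Person` entity, `db: Database`):
//
//     let mut people = EntityManager::<Person>::new(&db);
//     people.add_hook(Arc::new(NoDeleteHook));
//     // people.delete(&alice).await now fails with OrmError::Hook(..).
```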
diff --git a/type-bridge-core/crates/orm/src/lib.rs b/type-bridge-core/crates/orm/src/lib.rs
index 4c52d27..ff02511 100644
--- a/type-bridge-core/crates/orm/src/lib.rs
+++ b/type-bridge-core/crates/orm/src/lib.rs
@@ -39,6 +39,7 @@ pub mod error;
 pub mod expr;
 pub mod field_ref;
 pub mod filter;
+pub mod hooks;
 pub mod manager;
 pub mod query;
 pub mod relation;
@@ -59,6 +60,7 @@ pub use session::{Database, Transaction, TransactionContext, TxType};
 pub use schema::{SchemaDiff, SchemaInfo, SchemaManager};
 pub use value::AttributeValue;
 pub use field_ref::{FieldRef, RolePlayerFieldRef, RoleRef};
+pub use hooks::{CrudOperation, HookContext, HookError, HookRunner, LifecycleHook, PreHookResult, TypeKind};
 
 // Re-export derive macros when the `derive` feature is enabled.
 #[cfg(feature = "derive")]
diff --git a/type-bridge-core/crates/orm/src/manager/entity_manager.rs b/type-bridge-core/crates/orm/src/manager/entity_manager.rs
index 8271b09..2b6fd99 100644
--- a/type-bridge-core/crates/orm/src/manager/entity_manager.rs
+++ b/type-bridge-core/crates/orm/src/manager/entity_manager.rs
@@ -4,10 +4,12 @@
 //! operations backed by the session layer.
 
 use std::marker::PhantomData;
+use std::sync::Arc;
 
 use crate::entity::TypeBridgeEntity;
 use crate::error::{OrmError, Result};
 use crate::filter::Filter;
+use crate::hooks::{CrudOperation, HookRunner, LifecycleHook, TypeKind};
 use crate::query::EntityQuery;
 use crate::session::backend::{QueryResult, TxType};
 use crate::session::Database;
@@ -29,6 +31,7 @@ use super::query_builder;
 /// ```
 pub struct EntityManager<'db, T: TypeBridgeEntity> {
     db: &'db Database,
+    hooks: HookRunner,
     _marker: PhantomData<T>,
 }
 
@@ -37,20 +40,41 @@ impl<'db, T: TypeBridgeEntity> EntityManager<'db, T> {
     pub fn new(db: &'db Database) -> Self {
         Self {
             db,
+            hooks: HookRunner::new(),
             _marker: PhantomData,
         }
     }
 
+    /// Register a lifecycle hook.
+    ///
+    /// Hooks run in registration order for pre-hooks and reverse order
+    /// for post-hooks. Returns `&mut Self` for chaining.
+    pub fn add_hook(&mut self, hook: Arc<dyn LifecycleHook>) -> &mut Self {
+        self.hooks.add_hook(hook);
+        self
+    }
+
     /// Insert an entity and return the assigned IID.
     ///
     /// The entity's IID is also set in-place via [`TypeBridgeEntity::set_iid`].
     #[tracing::instrument(skip(self, entity), fields(entity_type = T::TYPE_NAME))]
     pub async fn insert(&self, entity: &mut T) -> Result<String> {
+        if self.hooks.has_hooks() {
+            let mut ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Insert,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_pre_hooks(&mut ctx).await?;
+        }
+
         let typeql = query_builder::build_insert_with_iid::<T>(entity, "$e")?;
         tracing::debug!(typeql = %typeql, entity_type = T::TYPE_NAME, "INSERT");
         let result = self.db.execute_raw(&typeql, TxType::Write).await?;
 
-        match result {
+        let iid = match result {
             QueryResult::Documents(docs) => {
                 let doc = docs.first().ok_or_else(|| OrmError::Hydration {
                     type_name: T::TYPE_NAME.into(),
@@ -77,7 +101,20 @@ impl<'db, T: TypeBridgeEntity> EntityManager<'db, T> {
                 type_name: T::TYPE_NAME.into(),
                 message: "Expected Documents from insert+fetch, got Rows".into(),
             }),
-        }
+        }?;
+
+        if self.hooks.has_hooks() {
+            let ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Insert,
+                entity.to_attribute_values(),
+                Some(iid.clone()),
+            );
+            self.hooks.run_post_hooks(&ctx).await;
+        }
+
+        Ok(iid)
     }
 
     /// Fetch entities matching the given filters.
@@ -133,9 +170,32 @@ impl<'db, T: TypeBridgeEntity> EntityManager<'db, T> {
     /// attributes for matching.
     #[tracing::instrument(skip(self, entity), fields(entity_type = T::TYPE_NAME))]
     pub async fn delete(&self, entity: &T) -> Result<()> {
+        if self.hooks.has_hooks() {
+            let mut ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Delete,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_pre_hooks(&mut ctx).await?;
+        }
+
         let typeql = query_builder::build_delete::<T>(entity, "$e")?;
         tracing::debug!(typeql = %typeql, entity_type = T::TYPE_NAME, "DELETE");
         self.db.execute_raw(&typeql, TxType::Write).await?;
+
+        if self.hooks.has_hooks() {
+            let ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Delete,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_post_hooks(&ctx).await;
+        }
+
         Ok(())
     }
 
@@ -151,9 +211,32 @@
     /// all other attribute values. Only non-key attributes are modified.
     #[tracing::instrument(skip(self, entity), fields(entity_type = T::TYPE_NAME))]
     pub async fn update(&self, entity: &T) -> Result<()> {
+        if self.hooks.has_hooks() {
+            let mut ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Update,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_pre_hooks(&mut ctx).await?;
+        }
+
         let typeql = query_builder::build_update::<T>(entity, "$e")?;
         tracing::debug!(typeql = %typeql, entity_type = T::TYPE_NAME, "UPDATE");
         self.db.execute_raw(&typeql, TxType::Write).await?;
+
+        if self.hooks.has_hooks() {
+            let ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Update,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_post_hooks(&ctx).await;
+        }
+
         Ok(())
     }
 
@@ -163,11 +246,22 @@
     /// Returns the IID of the entity (existing or newly created).
     #[tracing::instrument(skip(self, entity), fields(entity_type = T::TYPE_NAME))]
     pub async fn put(&self, entity: &mut T) -> Result<String> {
+        if self.hooks.has_hooks() {
+            let mut ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Put,
+                entity.to_attribute_values(),
+                entity.iid().map(String::from),
+            );
+            self.hooks.run_pre_hooks(&mut ctx).await?;
+        }
+
         let typeql = query_builder::build_put::<T>(entity, "$e")?;
         tracing::debug!(typeql = %typeql, entity_type = T::TYPE_NAME, "PUT");
         let result = self.db.execute_raw(&typeql, TxType::Write).await?;
 
-        match result {
+        let iid = match result {
             QueryResult::Documents(docs) => {
                 let doc = docs.first().ok_or_else(|| OrmError::Hydration {
                     type_name: T::TYPE_NAME.into(),
@@ -194,7 +288,20 @@
                 type_name: T::TYPE_NAME.into(),
                 message: "Expected Documents from put+fetch, got Rows".into(),
             }),
-        }
+        }?;
+
+        if self.hooks.has_hooks() {
+            let ctx = HookRunner::build_context(
+                T::TYPE_NAME,
+                TypeKind::Entity,
+                CrudOperation::Put,
+                entity.to_attribute_values(),
+                Some(iid.clone()),
+            );
+            self.hooks.run_post_hooks(&ctx).await;
+        }
+
+        Ok(iid)
     }
 
     /// Create a chainable query builder for this entity type.
@@ -224,8 +331,24 @@
     /// Insert multiple entities in a single transaction.
     ///
     /// Each entity's IID is set in-place. Returns a vector of assigned IIDs.
+    /// Pre-hooks run for ALL entities before the transaction starts; if any
+    /// rejects, the entire batch aborts. Post-hooks run after commit.
     #[tracing::instrument(skip(self, entities), fields(entity_type = T::TYPE_NAME, count = entities.len()))]
     pub async fn insert_many(&self, entities: &mut [T]) -> Result<Vec<String>> {
+        // Pre-hooks for all entities before the transaction.
+        if self.hooks.has_hooks() {
+            for entity in entities.iter() {
+                let mut ctx = HookRunner::build_context(
+                    T::TYPE_NAME,
+                    TypeKind::Entity,
+                    CrudOperation::Insert,
+                    entity.to_attribute_values(),
+                    entity.iid().map(String::from),
+                );
+                self.hooks.run_pre_hooks(&mut ctx).await?;
+            }
+        }
+
         let tx = self.db.transaction_context(TxType::Write).await?;
         let mut iids = Vec::with_capacity(entities.len());
 
@@ -261,12 +384,43 @@
         }
 
         tx.commit().await?;
+
+        // Post-hooks after successful commit.
+        if self.hooks.has_hooks() {
+            for entity in entities.iter() {
+                let ctx = HookRunner::build_context(
+                    T::TYPE_NAME,
+                    TypeKind::Entity,
+                    CrudOperation::Insert,
+                    entity.to_attribute_values(),
+                    entity.iid().map(String::from),
+                );
+                self.hooks.run_post_hooks(&ctx).await;
+            }
+        }
+
         Ok(iids)
     }
 
     /// Delete multiple entities in a single transaction.
+    ///
+    /// Pre-hooks run for ALL entities before the transaction. Post-hooks
+    /// run after commit.
     #[tracing::instrument(skip(self, entities), fields(entity_type = T::TYPE_NAME, count = entities.len()))]
     pub async fn delete_many(&self, entities: &[T]) -> Result<()> {
+        if self.hooks.has_hooks() {
+            for entity in entities {
+                let mut ctx = HookRunner::build_context(
+                    T::TYPE_NAME,
+                    TypeKind::Entity,
+                    CrudOperation::Delete,
+                    entity.to_attribute_values(),
+                    entity.iid().map(String::from),
+                );
+                self.hooks.run_pre_hooks(&mut ctx).await?;
+            }
+        }
+
         let tx = self.db.transaction_context(TxType::Write).await?;
         for entity in entities {
             let typeql = query_builder::build_delete::<T>(entity, "$e")?;
@@ -274,12 +428,42 @@
             tx.query(&typeql).await?;
         }
         tx.commit().await?;
+
+        if self.hooks.has_hooks() {
+            for entity in entities {
+                let ctx = HookRunner::build_context(
+                    T::TYPE_NAME,
+                    TypeKind::Entity,
+                    CrudOperation::Delete,
+                    entity.to_attribute_values(),
+                    entity.iid().map(String::from),
+                );
+                self.hooks.run_post_hooks(&ctx).await;
+            }
+        }
+
         Ok(())
     }
 
     /// Update multiple entities in a single transaction.
+    ///
+    /// Pre-hooks run for ALL entities before the transaction. Post-hooks
+    /// run after commit.
#[tracing::instrument(skip(self, entities), fields(entity_type = T::TYPE_NAME, count = entities.len()))] pub async fn update_many(&self, entities: &[T]) -> Result<()> { + if self.hooks.has_hooks() { + for entity in entities { + let mut ctx = HookRunner::build_context( + T::TYPE_NAME, + TypeKind::Entity, + CrudOperation::Update, + entity.to_attribute_values(), + entity.iid().map(String::from), + ); + self.hooks.run_pre_hooks(&mut ctx).await?; + } + } + let tx = self.db.transaction_context(TxType::Write).await?; for entity in entities { let typeql = query_builder::build_update::(entity, "$e")?; @@ -287,6 +471,20 @@ impl<'db, T: TypeBridgeEntity> EntityManager<'db, T> { tx.query(&typeql).await?; } tx.commit().await?; + + if self.hooks.has_hooks() { + for entity in entities { + let ctx = HookRunner::build_context( + T::TYPE_NAME, + TypeKind::Entity, + CrudOperation::Update, + entity.to_attribute_values(), + entity.iid().map(String::from), + ); + self.hooks.run_post_hooks(&ctx).await; + } + } + Ok(()) } } diff --git a/type-bridge-core/crates/orm/src/manager/relation_manager.rs b/type-bridge-core/crates/orm/src/manager/relation_manager.rs index 0585f07..32e903b 100644 --- a/type-bridge-core/crates/orm/src/manager/relation_manager.rs +++ b/type-bridge-core/crates/orm/src/manager/relation_manager.rs @@ -4,9 +4,11 @@ //! operations for relation types, parallel to [`EntityManager`](super::EntityManager). use std::marker::PhantomData; +use std::sync::Arc; use crate::error::{OrmError, Result}; use crate::filter::Filter; +use crate::hooks::{CrudOperation, HookRunner, LifecycleHook, TypeKind}; use crate::query::RelationQuery; use crate::relation::TypeBridgeRelation; use crate::session::backend::{QueryResult, TxType}; @@ -29,6 +31,7 @@ use super::query_builder; /// ``` pub struct RelationManager<'db, R: TypeBridgeRelation> { db: &'db Database, + hooks: HookRunner, _marker: PhantomData, } @@ -37,20 +40,41 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { pub fn new(db: &'db Database) -> Self { Self { db, + hooks: HookRunner::new(), _marker: PhantomData, } } + /// Register a lifecycle hook. + /// + /// Hooks run in registration order for pre-hooks and reverse order + /// for post-hooks. Returns `&mut Self` for chaining. + pub fn add_hook(&mut self, hook: Arc) -> &mut Self { + self.hooks.add_hook(hook); + self + } + /// Insert a relation and return the assigned IID. /// /// The relation's IID is also set in-place via [`TypeBridgeRelation::set_iid`]. 
#[tracing::instrument(skip(self, relation), fields(relation_type = R::TYPE_NAME))] pub async fn insert(&self, relation: &mut R) -> Result { + if self.hooks.has_hooks() { + let mut ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Insert, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_pre_hooks(&mut ctx).await?; + } + let typeql = query_builder::build_relation_insert_with_iid::(relation, "$r")?; tracing::debug!(typeql = %typeql, relation_type = R::TYPE_NAME, "INSERT RELATION"); let result = self.db.execute_raw(&typeql, TxType::Write).await?; - match result { + let iid = match result { QueryResult::Documents(docs) => { let doc = docs.first().ok_or_else(|| OrmError::Hydration { type_name: R::TYPE_NAME.into(), @@ -77,7 +101,20 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { type_name: R::TYPE_NAME.into(), message: "Expected Documents from insert+fetch, got Rows".into(), }), + }?; + + if self.hooks.has_hooks() { + let ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Insert, + relation.to_attribute_values(), + Some(iid.clone()), + ); + self.hooks.run_post_hooks(&ctx).await; } + + Ok(iid) } /// Fetch relations matching the given filters. @@ -125,9 +162,32 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { /// Delete a specific relation instance. #[tracing::instrument(skip(self, relation), fields(relation_type = R::TYPE_NAME))] pub async fn delete(&self, relation: &R) -> Result<()> { + if self.hooks.has_hooks() { + let mut ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Delete, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_pre_hooks(&mut ctx).await?; + } + let typeql = query_builder::build_relation_delete::(relation, "$r")?; tracing::debug!(typeql = %typeql, relation_type = R::TYPE_NAME, "DELETE RELATION"); self.db.execute_raw(&typeql, TxType::Write).await?; + + if self.hooks.has_hooks() { + let ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Delete, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_post_hooks(&ctx).await; + } + Ok(()) } @@ -164,8 +224,23 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { /// Insert multiple relations in a single transaction. /// /// Each relation's IID is set in-place. Returns a vector of assigned IIDs. + /// Pre-hooks run for ALL relations before the transaction starts; if any + /// rejects, the entire batch aborts. Post-hooks run after commit. 
#[tracing::instrument(skip(self, relations), fields(relation_type = R::TYPE_NAME, count = relations.len()))] pub async fn insert_many(&self, relations: &mut [R]) -> Result> { + if self.hooks.has_hooks() { + for relation in relations.iter() { + let mut ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Insert, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_pre_hooks(&mut ctx).await?; + } + } + let tx = self.db.transaction_context(TxType::Write).await?; let mut iids = Vec::with_capacity(relations.len()); @@ -201,12 +276,42 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { } tx.commit().await?; + + if self.hooks.has_hooks() { + for relation in relations.iter() { + let ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Insert, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_post_hooks(&ctx).await; + } + } + Ok(iids) } /// Delete multiple relations in a single transaction. + /// + /// Pre-hooks run for ALL relations before the transaction. Post-hooks + /// run after commit. #[tracing::instrument(skip(self, relations), fields(relation_type = R::TYPE_NAME, count = relations.len()))] pub async fn delete_many(&self, relations: &[R]) -> Result<()> { + if self.hooks.has_hooks() { + for relation in relations { + let mut ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Delete, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_pre_hooks(&mut ctx).await?; + } + } + let tx = self.db.transaction_context(TxType::Write).await?; for relation in relations { let typeql = query_builder::build_relation_delete::(relation, "$r")?; @@ -214,6 +319,20 @@ impl<'db, R: TypeBridgeRelation> RelationManager<'db, R> { tx.query(&typeql).await?; } tx.commit().await?; + + if self.hooks.has_hooks() { + for relation in relations { + let ctx = HookRunner::build_context( + R::TYPE_NAME, + TypeKind::Relation, + CrudOperation::Delete, + relation.to_attribute_values(), + relation.iid().map(String::from), + ); + self.hooks.run_post_hooks(&ctx).await; + } + } + Ok(()) } } diff --git a/type-bridge-core/crates/orm/tests/common/mod.rs b/type-bridge-core/crates/orm/tests/common/mod.rs new file mode 100644 index 0000000..c941bcd --- /dev/null +++ b/type-bridge-core/crates/orm/tests/common/mod.rs @@ -0,0 +1,478 @@ +//! Shared test utilities for ORM integration tests. +//! +//! Provides a mock backend, helper functions, and common model definitions +//! used across multiple test files. + +#![allow(dead_code)] + +use std::sync::{Arc, Mutex}; + +use type_bridge_orm::session::backend::{BoxFuture, DriverBackend, QueryResult, TransactionOps}; +use type_bridge_orm::*; + +// ── Mock backend ───────────────────────────────────────────────────── + +/// Records queries and returns pre-configured results (LIFO order). 
+pub struct MockBackend { + responses: Arc>>, + pub queries: Arc>>, +} + +impl MockBackend { + pub fn new(responses: Vec) -> Self { + Self { + responses: Arc::new(Mutex::new(responses)), + queries: Arc::new(Mutex::new(Vec::new())), + } + } +} + +impl DriverBackend for MockBackend { + fn open_transaction( + &self, + _database: &str, + _tx_type: TxType, + ) -> BoxFuture<'_, std::result::Result, OrmError>> { + let responses = Arc::clone(&self.responses); + let queries = Arc::clone(&self.queries); + Box::pin(async move { + Ok(Box::new(MockTransaction { + responses, + queries, + }) as Box) + }) + } + + fn is_open(&self) -> bool { + true + } +} + +pub struct MockTransaction { + responses: Arc>>, + queries: Arc>>, +} + +impl TransactionOps for MockTransaction { + fn query( + &mut self, + typeql: &str, + ) -> BoxFuture<'_, std::result::Result> { + self.queries.lock().unwrap().push(typeql.to_string()); + let result = self + .responses + .lock() + .unwrap() + .pop() + .unwrap_or(QueryResult::Ok); + Box::pin(async move { Ok(result) }) + } + + fn commit(&mut self) -> BoxFuture<'_, std::result::Result<(), OrmError>> { + Box::pin(async { Ok(()) }) + } +} + +// ── Failing mock backend ───────────────────────────────────────────── + +/// Mock backend whose transactions always fail with a configurable error. +pub struct FailingMockBackend { + error_message: String, +} + +impl FailingMockBackend { + pub fn new(message: &str) -> Self { + Self { + error_message: message.to_string(), + } + } +} + +impl DriverBackend for FailingMockBackend { + fn open_transaction( + &self, + _database: &str, + _tx_type: TxType, + ) -> BoxFuture<'_, std::result::Result, OrmError>> { + let msg = self.error_message.clone(); + Box::pin(async move { + Ok(Box::new(FailingMockTransaction { + error_message: msg, + }) as Box) + }) + } + + fn is_open(&self) -> bool { + true + } +} + +pub struct FailingMockTransaction { + error_message: String, +} + +impl TransactionOps for FailingMockTransaction { + fn query( + &mut self, + _typeql: &str, + ) -> BoxFuture<'_, std::result::Result> { + let msg = self.error_message.clone(); + Box::pin(async move { + Err(OrmError::Transaction(msg)) + }) + } + + fn commit(&mut self) -> BoxFuture<'_, std::result::Result<(), OrmError>> { + Box::pin(async { Ok(()) }) + } +} + +// ── Helper functions ───────────────────────────────────────────────── + +pub fn insert_response(iid: &str) -> QueryResult { + QueryResult::Documents(vec![serde_json::json!({ "iid": iid })]) +} + +// ── Attribute types ────────────────────────────────────────────────── + +define_attribute!(Name, "name", "string"); +define_attribute!(Age, "age", "long"); +define_attribute!(Position, "position", "string"); + +// ── Person entity ──────────────────────────────────────────────────── + +#[derive(Debug)] +pub struct Person { + pub iid: Option, + pub name: Name, + pub age: Age, +} + +impl TypeBridgeEntity for Person { + const TYPE_NAME: &'static str = "person"; + + fn owned_attributes() -> &'static [OwnedAttributeInfo] { + &[ + OwnedAttributeInfo { + attr_name: "name", + value_type: ValueType::String, + annotations: &[Annotation::Key], + }, + OwnedAttributeInfo { + attr_name: "age", + value_type: ValueType::Long, + annotations: &[], + }, + ] + } + + fn iid(&self) -> Option<&str> { + self.iid.as_deref() + } + + fn set_iid(&mut self, iid: String) { + self.iid = Some(iid); + } + + fn to_attribute_values(&self) -> Vec<(&'static str, AttributeValue)> { + vec![ + ("name", self.name.to_value()), + ("age", self.age.to_value()), + ] + } + + fn 
from_document(doc: &serde_json::Map) -> Result { + let name = doc + .get("name") + .and_then(|v| v.as_str()) + .ok_or_else(|| OrmError::Hydration { + type_name: "person".into(), + message: "missing name".into(), + })?; + let age = doc + .get("age") + .and_then(|v| v.as_i64()) + .ok_or_else(|| OrmError::Hydration { + type_name: "person".into(), + message: "missing age".into(), + })?; + Ok(Person { + iid: None, + name: Name(name.to_string()), + age: Age(age), + }) + } +} + +pub fn make_person(name: &str, age: i64) -> Person { + Person { + iid: None, + name: Name(name.into()), + age: Age(age), + } +} + +pub fn make_person_with_iid(name: &str, age: i64, iid: &str) -> Person { + Person { + iid: Some(iid.to_string()), + name: Name(name.into()), + age: Age(age), + } +} + +// ── Employment relation ────────────────────────────────────────────── + +#[derive(Debug)] +pub struct Employment { + pub iid: Option, + pub employee: RolePlayerRef, + pub employer: RolePlayerRef, + pub position: Option, +} + +impl TypeBridgeRelation for Employment { + const TYPE_NAME: &'static str = "employment"; + + fn owned_attributes() -> &'static [OwnedAttributeInfo] { + static ATTRS: [OwnedAttributeInfo; 1] = [OwnedAttributeInfo { + attr_name: "position", + value_type: ValueType::String, + annotations: &[], + }]; + &ATTRS + } + + fn role_info() -> &'static [RoleInfo] { + static ROLES: [RoleInfo; 2] = [ + RoleInfo { + role_name: "employee", + player_type_name: "person", + }, + RoleInfo { + role_name: "employer", + player_type_name: "company", + }, + ]; + &ROLES + } + + fn iid(&self) -> Option<&str> { + self.iid.as_deref() + } + + fn set_iid(&mut self, iid: String) { + self.iid = Some(iid); + } + + fn to_attribute_values(&self) -> Vec<(&'static str, AttributeValue)> { + let mut values = Vec::new(); + if let Some(ref pos) = self.position { + values.push(("position", AttributeValue::String(pos.clone()))); + } + values + } + + fn to_role_player_refs(&self) -> Vec { + vec![self.employee.clone(), self.employer.clone()] + } + + fn from_document(doc: &serde_json::Map) -> Result { + let position = doc.get("position").and_then(|v| v.as_str()).map(String::from); + Ok(Self { + iid: None, + employee: RolePlayerRef { + role: "employee", + entity_type_name: "person", + iid: None, + key: None, + }, + employer: RolePlayerRef { + role: "employer", + entity_type_name: "company", + iid: None, + key: None, + }, + position, + }) + } +} + +pub fn make_employment( + iid: Option<&str>, + emp_iid: Option<&str>, + emp_key: Option<(&'static str, AttributeValue)>, + er_iid: Option<&str>, + er_key: Option<(&'static str, AttributeValue)>, + position: Option<&str>, +) -> Employment { + Employment { + iid: iid.map(String::from), + employee: RolePlayerRef { + role: "employee", + entity_type_name: "person", + iid: emp_iid.map(String::from), + key: emp_key, + }, + employer: RolePlayerRef { + role: "employer", + entity_type_name: "company", + iid: er_iid.map(String::from), + key: er_key, + }, + position: position.map(String::from), + } +} + +// ── Lifecycle hook helpers ─────────────────────────────────────────── + +/// Hook that records all calls for verification. 
+pub struct RecordingHook {
+    pub calls: Arc<Mutex<Vec<(String, CrudOperation)>>>,
+}
+
+impl RecordingHook {
+    #[allow(clippy::type_complexity)]
+    pub fn new() -> (Self, Arc<Mutex<Vec<(String, CrudOperation)>>>) {
+        let calls = Arc::new(Mutex::new(Vec::new()));
+        (
+            Self {
+                calls: Arc::clone(&calls),
+            },
+            calls,
+        )
+    }
+}
+
+impl LifecycleHook for RecordingHook {
+    fn name(&self) -> &str {
+        "recording"
+    }
+
+    fn before_operation<'a>(
+        &'a self,
+        ctx: &'a mut HookContext,
+    ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
+        self.calls
+            .lock()
+            .unwrap()
+            .push((format!("pre:{}", ctx.type_name), ctx.operation));
+        Box::pin(async { Ok(PreHookResult::Continue) })
+    }
+
+    fn after_operation<'a>(
+        &'a self,
+        ctx: &'a HookContext,
+    ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
+        self.calls
+            .lock()
+            .unwrap()
+            .push((format!("post:{}", ctx.type_name), ctx.operation));
+        Box::pin(async { Ok(()) })
+    }
+}
+
+/// Hook that rejects all operations.
+pub struct RejectingHook {
+    pub reason: String,
+}
+
+impl RejectingHook {
+    pub fn new(reason: &str) -> Self {
+        Self {
+            reason: reason.to_string(),
+        }
+    }
+}
+
+impl LifecycleHook for RejectingHook {
+    fn name(&self) -> &str {
+        "rejecting"
+    }
+
+    fn before_operation<'a>(
+        &'a self,
+        _ctx: &'a mut HookContext,
+    ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
+        let reason = self.reason.clone();
+        Box::pin(async move { Ok(PreHookResult::Reject { reason }) })
+    }
+
+    fn after_operation<'a>(
+        &'a self,
+        _ctx: &'a HookContext,
+    ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
+        Box::pin(async { Ok(()) })
+    }
+}
+
+/// Hook that only runs for specific operations.
+pub struct OperationFilterHook {
+    pub allowed_op: CrudOperation,
+    pub calls: Arc<Mutex<Vec<CrudOperation>>>,
+}
+
+impl OperationFilterHook {
+    pub fn new(op: CrudOperation) -> (Self, Arc<Mutex<Vec<CrudOperation>>>) {
+        let calls = Arc::new(Mutex::new(Vec::new()));
+        (
+            Self {
+                allowed_op: op,
+                calls: Arc::clone(&calls),
+            },
+            calls,
+        )
+    }
+}
+
+impl LifecycleHook for OperationFilterHook {
+    fn name(&self) -> &str {
+        "op-filter"
+    }
+
+    fn before_operation<'a>(
+        &'a self,
+        ctx: &'a mut HookContext,
+    ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
+        self.calls.lock().unwrap().push(ctx.operation);
+        Box::pin(async { Ok(PreHookResult::Continue) })
+    }
+
+    fn after_operation<'a>(
+        &'a self,
+        _ctx: &'a HookContext,
+    ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
+        Box::pin(async { Ok(()) })
+    }
+
+    fn should_run(&self, ctx: &HookContext) -> bool {
+        ctx.operation == self.allowed_op
+    }
+}
+
+/// Hook whose post-hook fails (errors should be logged, not propagated).
+pub struct FailingPostHook;
+
+impl LifecycleHook for FailingPostHook {
+    fn name(&self) -> &str {
+        "failing-post"
+    }
+
+    fn before_operation<'a>(
+        &'a self,
+        _ctx: &'a mut HookContext,
+    ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
+        Box::pin(async { Ok(PreHookResult::Continue) })
+    }
+
+    fn after_operation<'a>(
+        &'a self,
+        _ctx: &'a HookContext,
+    ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
+        Box::pin(async {
+            Err(HookError::Internal {
+                hook_name: "failing-post".to_string(),
+                source: "simulated failure".into(),
+            })
+        })
+    }
+}
diff --git a/type-bridge-core/crates/orm/tests/entity_manager_tests.rs b/type-bridge-core/crates/orm/tests/entity_manager_tests.rs
index fffe59b..6619a74 100644
--- a/type-bridge-core/crates/orm/tests/entity_manager_tests.rs
+++ b/type-bridge-core/crates/orm/tests/entity_manager_tests.rs
@@ -1,141 +1,12 @@
 //! Integration tests for `EntityManager` using a mock backend.
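The hunk below removes the duplicated in-file model and mock definitions in favor of the shared `tests/common/mod.rs` module. For orientation, a minimal sketch of what a test in this file looks like after the refactor; the test name, the asserted IID, and the one-query assumption are illustrative, not lines from this patch:

```rust
mod common;

use std::sync::Arc;

use common::*;
use type_bridge_orm::*;

#[tokio::test]
async fn insert_assigns_iid() {
    // MockBackend pops pre-seeded responses (LIFO) and records every query.
    let backend = MockBackend::new(vec![insert_response("0x1")]);
    let queries = Arc::clone(&backend.queries);
    let db = Database::with_backend(Box::new(backend), "testdb");

    let manager = EntityManager::<Person>::new(&db);
    let mut person = make_person("Alice", 30);
    let iid = manager.insert(&mut person).await.unwrap();

    assert_eq!(iid, "0x1");
    assert_eq!(person.iid(), Some("0x1"));
    // Assumption: a single insert issues exactly one TypeQL statement.
    assert_eq!(queries.lock().unwrap().len(), 1);
}
```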
-use std::sync::{Arc, Mutex}; +mod common; -use type_bridge_orm::*; - -// ── Test entity ────────────────────────────────────────────────────── - -define_attribute!(Name, "name", "string"); -define_attribute!(Age, "age", "long"); - -#[derive(Debug)] -struct Person { - iid: Option, - name: Name, - age: Age, -} - -impl TypeBridgeEntity for Person { - const TYPE_NAME: &'static str = "person"; - - fn owned_attributes() -> &'static [OwnedAttributeInfo] { - &[ - OwnedAttributeInfo { - attr_name: "name", - value_type: ValueType::String, - annotations: &[Annotation::Key], - }, - OwnedAttributeInfo { - attr_name: "age", - value_type: ValueType::Long, - annotations: &[], - }, - ] - } - - fn iid(&self) -> Option<&str> { - self.iid.as_deref() - } - - fn set_iid(&mut self, iid: String) { - self.iid = Some(iid); - } - - fn to_attribute_values(&self) -> Vec<(&'static str, AttributeValue)> { - vec![ - ("name", self.name.to_value()), - ("age", self.age.to_value()), - ] - } - - fn from_document(doc: &serde_json::Map) -> Result { - let name = doc - .get("name") - .and_then(|v| v.as_str()) - .ok_or_else(|| OrmError::Hydration { - type_name: "person".into(), - message: "missing name".into(), - })?; - let age = doc - .get("age") - .and_then(|v| v.as_i64()) - .ok_or_else(|| OrmError::Hydration { - type_name: "person".into(), - message: "missing age".into(), - })?; - Ok(Person { - iid: None, - name: Name(name.to_string()), - age: Age(age), - }) - } -} - -// ── Mock backend ───────────────────────────────────────────────────── - -use type_bridge_orm::session::backend::{BoxFuture, DriverBackend, QueryResult, TransactionOps}; +use std::sync::Arc; -/// Records queries and returns pre-configured results. -struct MockBackend { - responses: Arc>>, - queries: Arc>>, -} - -impl MockBackend { - fn new(responses: Vec) -> Self { - Self { - responses: Arc::new(Mutex::new(responses)), - queries: Arc::new(Mutex::new(Vec::new())), - } - } -} - -impl DriverBackend for MockBackend { - fn open_transaction( - &self, - _database: &str, - _tx_type: TxType, - ) -> BoxFuture<'_, std::result::Result, OrmError>> { - let responses = Arc::clone(&self.responses); - let queries = Arc::clone(&self.queries); - Box::pin(async move { - Ok(Box::new(MockTransaction { - responses, - queries, - }) as Box) - }) - } - - fn is_open(&self) -> bool { - true - } -} - -struct MockTransaction { - responses: Arc>>, - queries: Arc>>, -} - -impl TransactionOps for MockTransaction { - fn query( - &mut self, - typeql: &str, - ) -> BoxFuture<'_, std::result::Result> { - self.queries.lock().unwrap().push(typeql.to_string()); - let result = self - .responses - .lock() - .unwrap() - .pop() - .unwrap_or(QueryResult::Ok); - Box::pin(async move { Ok(result) }) - } - - fn commit(&mut self) -> BoxFuture<'_, std::result::Result<(), OrmError>> { - Box::pin(async { Ok(()) }) - } -} +use common::*; +use type_bridge_orm::session::backend::QueryResult; +use type_bridge_orm::*; // ── Tests ──────────────────────────────────────────────────────────── diff --git a/type-bridge-core/crates/orm/tests/error_handling_tests.rs b/type-bridge-core/crates/orm/tests/error_handling_tests.rs new file mode 100644 index 0000000..92b391a --- /dev/null +++ b/type-bridge-core/crates/orm/tests/error_handling_tests.rs @@ -0,0 +1,227 @@ +//! Integration tests for ORM error propagation. +//! +//! Verifies that backend failures, wrong response types, and edge-case +//! responses are correctly surfaced as the appropriate [`OrmError`] variants. 
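The new file below pins down which `OrmError` variant each failure mode maps to. As a hedged illustration of consuming that error surface from application code, a hypothetical triage helper; only the variant shapes come from this patch, the retry policy itself is invented:

```rust
use type_bridge_orm::{HookError, OrmError};

/// Hypothetical helper: decide whether an ORM failure is worth retrying.
fn is_retryable(err: &OrmError) -> bool {
    match err {
        // Transport/transaction failures are plausibly transient.
        OrmError::Transaction(_) => true,
        // A hook veto is a deliberate rejection; never retry.
        OrmError::Hook(HookError::Rejected { .. }) => false,
        // Shape mismatches (wrong QueryResult variant, missing fields)
        // indicate a logic bug, not a transient condition.
        OrmError::Hydration { .. } => false,
        // Stay conservative for anything else.
        _ => false,
    }
}
```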
+ +mod common; + +use common::*; +use type_bridge_orm::session::backend::QueryResult; +use type_bridge_orm::*; + +// ── FailingMockBackend tests ──────────────────────────────────────── + +#[tokio::test] +async fn entity_insert_backend_failure_propagates() { + let backend = FailingMockBackend::new("simulated failure"); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut person = make_person("Alice", 30); + let manager = EntityManager::::new(&db); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Transaction(msg) => assert!(msg.contains("simulated failure")), + other => panic!("Expected Transaction error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_get_backend_failure_propagates() { + let backend = FailingMockBackend::new("simulated failure"); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let manager = EntityManager::::new(&db); + let result = manager.get(&[]).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Transaction(msg) => assert!(msg.contains("simulated failure")), + other => panic!("Expected Transaction error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_delete_backend_failure_propagates() { + let backend = FailingMockBackend::new("simulated failure"); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let person = make_person_with_iid("Alice", 30, "0x123"); + let manager = EntityManager::::new(&db); + let result = manager.delete(&person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Transaction(msg) => assert!(msg.contains("simulated failure")), + other => panic!("Expected Transaction error, got: {other}"), + } +} + +#[tokio::test] +async fn relation_insert_backend_failure_propagates() { + let backend = FailingMockBackend::new("simulated failure"); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut employment = make_employment( + None, + Some("0xemp"), + None, + Some("0xer"), + None, + Some("Engineer"), + ); + let manager = RelationManager::::new(&db); + let result = manager.insert(&mut employment).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Transaction(msg) => assert!(msg.contains("simulated failure")), + other => panic!("Expected Transaction error, got: {other}"), + } +} + +#[tokio::test] +async fn relation_get_backend_failure_propagates() { + let backend = FailingMockBackend::new("simulated failure"); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let manager = RelationManager::::new(&db); + let result = manager.get(&[]).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Transaction(msg) => assert!(msg.contains("simulated failure")), + other => panic!("Expected Transaction error, got: {other}"), + } +} + +// ── Wrong response type tests ─────────────────────────────────────── + +#[tokio::test] +async fn entity_insert_returns_ok_instead_of_documents() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut person = make_person("Alice", 30); + let manager = EntityManager::::new(&db); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hydration { type_name, message } => { + assert_eq!(type_name, "person"); + assert!( + message.contains("Ok"), + "Expected message mentioning 'Ok', got: {message}" + ); + } + 
other => panic!("Expected Hydration error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_insert_returns_rows_instead_of_documents() { + let backend = MockBackend::new(vec![QueryResult::Rows(vec![serde_json::json!({ + "$count": 1 + })])]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut person = make_person("Alice", 30); + let manager = EntityManager::::new(&db); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hydration { type_name, message } => { + assert_eq!(type_name, "person"); + assert!( + message.contains("Rows"), + "Expected message mentioning 'Rows', got: {message}" + ); + } + other => panic!("Expected Hydration error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_insert_empty_documents_response() { + let backend = MockBackend::new(vec![QueryResult::Documents(vec![])]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut person = make_person("Alice", 30); + let manager = EntityManager::::new(&db); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hydration { type_name, message } => { + assert_eq!(type_name, "person"); + assert!( + message.contains("no documents") || message.contains("No"), + "Expected message about empty documents, got: {message}" + ); + } + other => panic!("Expected Hydration error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_count_wrong_response_type() { + // count expects Rows, but we return Documents + let backend = MockBackend::new(vec![QueryResult::Documents(vec![])]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let manager = EntityManager::::new(&db); + let result = manager.count().await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hydration { type_name, message } => { + assert_eq!(type_name, "count"); + assert!( + message.contains("Documents"), + "Expected message mentioning 'Documents', got: {message}" + ); + } + other => panic!("Expected Hydration error, got: {other}"), + } +} + +#[tokio::test] +async fn entity_get_one_multiple_results() { + // get_one expects exactly 1 result; returning 2 should produce a Hydration error + let docs = vec![ + serde_json::json!({ + "_iid": "0x001", + "attributes": { + "name": [{"value": "Alice"}], + "age": [{"value": 30}] + } + }), + serde_json::json!({ + "_iid": "0x002", + "attributes": { + "name": [{"value": "Bob"}], + "age": [{"value": 25}] + } + }), + ]; + let backend = MockBackend::new(vec![QueryResult::Documents(docs)]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let manager = EntityManager::::new(&db); + let result = manager.get_one(&[]).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hydration { type_name, message } => { + assert_eq!(type_name, "person"); + assert!( + message.contains("Expected 1 result, got 2"), + "Expected message about multiple results, got: {message}" + ); + } + other => panic!("Expected Hydration error, got: {other}"), + } +} diff --git a/type-bridge-core/crates/orm/tests/hooks_tests.rs b/type-bridge-core/crates/orm/tests/hooks_tests.rs new file mode 100644 index 0000000..e126e70 --- /dev/null +++ b/type-bridge-core/crates/orm/tests/hooks_tests.rs @@ -0,0 +1,476 @@ +//! Tests for the lifecycle hook system. 
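For context before the tests: a sketch of a user-written hook against the trait shape exercised in this file. The `AuditHook` name and the `tracing` fields are invented for the example, and `Debug` on `CrudOperation` is assumed:

```rust
use type_bridge_orm::session::backend::BoxFuture;
use type_bridge_orm::*;

/// Illustrative audit hook: logs every operation, never blocks one.
struct AuditHook;

impl LifecycleHook for AuditHook {
    fn name(&self) -> &str {
        "audit"
    }

    fn before_operation<'a>(
        &'a self,
        ctx: &'a mut HookContext,
    ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
        tracing::info!(type_name = %ctx.type_name, op = ?ctx.operation, "pre");
        Box::pin(async { Ok(PreHookResult::Continue) })
    }

    fn after_operation<'a>(
        &'a self,
        ctx: &'a HookContext,
    ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
        tracing::info!(type_name = %ctx.type_name, op = ?ctx.operation, "post");
        Box::pin(async { Ok(()) })
    }
}
```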
+ +mod common; + +use std::sync::{Arc, Mutex}; + +use common::*; +use type_bridge_orm::session::backend::{BoxFuture, QueryResult}; +use type_bridge_orm::*; + +type CapturedContext = Arc)>>>; + +// ── Tests: Pre/post hook execution ────────────────────────────────── + +#[tokio::test] +async fn insert_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![insert_response("0x1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let mut person = make_person("Alice", 30); + manager.insert(&mut person).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!(calls[0], ("pre:person".to_string(), CrudOperation::Insert)); + assert_eq!( + calls[1], + ("post:person".to_string(), CrudOperation::Insert) + ); +} + +#[tokio::test] +async fn delete_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let person = make_person_with_iid("Alice", 30, "0xabc"); + manager.delete(&person).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!(calls[0], ("pre:person".to_string(), CrudOperation::Delete)); + assert_eq!( + calls[1], + ("post:person".to_string(), CrudOperation::Delete) + ); +} + +#[tokio::test] +async fn update_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let person = make_person_with_iid("Alice", 31, "0xabc"); + manager.update(&person).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!(calls[0], ("pre:person".to_string(), CrudOperation::Update)); + assert_eq!( + calls[1], + ("post:person".to_string(), CrudOperation::Update) + ); +} + +#[tokio::test] +async fn put_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![insert_response("0xput1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let mut person = make_person("Alice", 30); + manager.put(&mut person).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!(calls[0], ("pre:person".to_string(), CrudOperation::Put)); + assert_eq!(calls[1], ("post:person".to_string(), CrudOperation::Put)); +} + +// ── Tests: Pre-hook rejection ─────────────────────────────────────── + +#[tokio::test] +async fn pre_hook_rejection_prevents_insert() { + let backend = MockBackend::new(vec![insert_response("0x1")]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("validation failed"))); + + let mut person = make_person("Alice", 30); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hook(HookError::Rejected { reason, .. 
}) => { + assert_eq!(reason, "validation failed"); + } + other => panic!("Expected Hook(Rejected), got: {other}"), + } + + // No query should have been executed + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty(), "Rejected insert should not execute query"); +} + +#[tokio::test] +async fn pre_hook_rejection_prevents_delete() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("cannot delete"))); + + let person = make_person_with_iid("Alice", 30, "0xabc"); + let result = manager.delete(&person).await; + + assert!(result.is_err()); + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +#[tokio::test] +async fn pre_hook_rejection_prevents_update() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("cannot update"))); + + let person = make_person_with_iid("Alice", 31, "0xabc"); + let result = manager.update(&person).await; + + assert!(result.is_err()); + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +#[tokio::test] +async fn pre_hook_rejection_prevents_put() { + let backend = MockBackend::new(vec![insert_response("0xput1")]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("cannot put"))); + + let mut person = make_person("Alice", 30); + let result = manager.put(&mut person).await; + + assert!(result.is_err()); + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +// ── Tests: Post-hook errors are non-fatal ─────────────────────────── + +#[tokio::test] +async fn post_hook_error_does_not_propagate() { + let backend = MockBackend::new(vec![insert_response("0x1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(FailingPostHook)); + + let mut person = make_person("Alice", 30); + // Should succeed despite the failing post-hook + let iid = manager.insert(&mut person).await.unwrap(); + assert_eq!(iid, "0x1"); +} + +// ── Tests: should_run filtering ───────────────────────────────────── + +#[tokio::test] +async fn should_run_filters_by_operation() { + let backend = MockBackend::new(vec![ + QueryResult::Ok, // for delete + insert_response("0x1"), // for insert + ]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = OperationFilterHook::new(CrudOperation::Insert); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + // Insert should trigger the hook + let mut person = make_person("Alice", 30); + manager.insert(&mut person).await.unwrap(); + + // Delete should NOT trigger the hook + let person = make_person_with_iid("Alice", 30, "0x1"); + manager.delete(&person).await.unwrap(); + + let calls = calls.lock().unwrap(); + // Only 1 pre-hook call from insert (should_run=false skips delete) + assert_eq!(calls.len(), 1); + assert_eq!(calls[0], CrudOperation::Insert); +} + +// ── Tests: Multiple hooks ─────────────────────────────────────────── + +#[tokio::test] +async fn 
multiple_hooks_run_in_order() { + let backend = MockBackend::new(vec![insert_response("0x1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook1, calls1) = RecordingHook::new(); + let (hook2, calls2) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook1)); + manager.add_hook(Arc::new(hook2)); + + let mut person = make_person("Alice", 30); + manager.insert(&mut person).await.unwrap(); + + // Both hooks should have been called + let c1 = calls1.lock().unwrap(); + let c2 = calls2.lock().unwrap(); + assert_eq!(c1.len(), 2); // pre + post + assert_eq!(c2.len(), 2); // pre + post +} + +#[tokio::test] +async fn rejection_short_circuits_subsequent_hooks() { + let backend = MockBackend::new(vec![insert_response("0x1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (recorder, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + // Rejecting hook first, then recording hook + manager.add_hook(Arc::new(RejectingHook::new("rejected"))); + manager.add_hook(Arc::new(recorder)); + + let mut person = make_person("Alice", 30); + let result = manager.insert(&mut person).await; + + assert!(result.is_err()); + // Recording hook should not have been called at all + let calls = calls.lock().unwrap(); + assert!(calls.is_empty()); +} + +// ── Tests: Batch operations ───────────────────────────────────────── + +#[tokio::test] +async fn insert_many_fires_hooks_per_entity() { + let backend = MockBackend::new(vec![ + insert_response("0xb2"), + insert_response("0xb1"), + ]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let mut entities = vec![ + make_person("Alice", 30), + make_person("Bob", 25), + ]; + manager.insert_many(&mut entities).await.unwrap(); + + let calls = calls.lock().unwrap(); + // 2 pre-hooks + 2 post-hooks = 4 total + assert_eq!(calls.len(), 4); + // Pre-hooks first (all before DB ops) + assert_eq!(calls[0].1, CrudOperation::Insert); + assert_eq!(calls[1].1, CrudOperation::Insert); + // Then post-hooks (all after commit) + assert_eq!(calls[2].1, CrudOperation::Insert); + assert_eq!(calls[3].1, CrudOperation::Insert); +} + +#[tokio::test] +async fn insert_many_rejection_aborts_entire_batch() { + let backend = MockBackend::new(vec![ + insert_response("0xb2"), + insert_response("0xb1"), + ]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("batch rejected"))); + + let mut entities = vec![ + make_person("Alice", 30), + make_person("Bob", 25), + ]; + let result = manager.insert_many(&mut entities).await; + + assert!(result.is_err()); + // No queries should have been executed + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +#[tokio::test] +async fn delete_many_fires_hooks_per_entity() { + let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = EntityManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let entities = vec![ + make_person_with_iid("Alice", 30, "0x1"), + make_person_with_iid("Bob", 25, "0x2"), + ]; + manager.delete_many(&entities).await.unwrap(); + + let calls = 
calls.lock().unwrap();
+    assert_eq!(calls.len(), 4); // 2 pre + 2 post
+    assert_eq!(calls[0].1, CrudOperation::Delete);
+    assert_eq!(calls[1].1, CrudOperation::Delete);
+    assert_eq!(calls[2].1, CrudOperation::Delete);
+    assert_eq!(calls[3].1, CrudOperation::Delete);
+}
+
+#[tokio::test]
+async fn update_many_fires_hooks_per_entity() {
+    let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let (hook, calls) = RecordingHook::new();
+    let mut manager = EntityManager::<Person>::new(&db);
+    manager.add_hook(Arc::new(hook));
+
+    let entities = vec![
+        make_person_with_iid("Alice", 31, "0x1"),
+        make_person_with_iid("Bob", 26, "0x2"),
+    ];
+    manager.update_many(&entities).await.unwrap();
+
+    let calls = calls.lock().unwrap();
+    assert_eq!(calls.len(), 4); // 2 pre + 2 post
+    assert_eq!(calls[0].1, CrudOperation::Update);
+    assert_eq!(calls[1].1, CrudOperation::Update);
+}
+
+// ── Tests: Zero-overhead guard ────────────────────────────────────
+
+#[tokio::test]
+async fn no_hooks_means_no_overhead() {
+    let backend = MockBackend::new(vec![insert_response("0x1")]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    // Manager with NO hooks registered
+    let manager = EntityManager::<Person>::new(&db);
+    let mut person = make_person("Alice", 30);
+    let iid = manager.insert(&mut person).await.unwrap();
+
+    assert_eq!(iid, "0x1");
+    assert_eq!(person.iid(), Some("0x1"));
+}
+
+// ── Tests: HookContext attributes ─────────────────────────────────
+
+#[tokio::test]
+async fn pre_hook_receives_correct_context() {
+    let backend = MockBackend::new(vec![insert_response("0x1")]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let captured_ctx: CapturedContext = Arc::new(Mutex::new(None));
+
+    struct ContextCapture {
+        captured: CapturedContext,
+    }
+    impl LifecycleHook for ContextCapture {
+        fn name(&self) -> &str {
+            "ctx-capture"
+        }
+        fn before_operation<'a>(
+            &'a self,
+            ctx: &'a mut HookContext,
+        ) -> BoxFuture<'a, std::result::Result<PreHookResult, HookError>> {
+            let attr_names: Vec<String> = ctx
+                .attributes
+                .iter()
+                .map(|(name, _)| name.to_string())
+                .collect();
+            *self.captured.lock().unwrap() = Some((
+                ctx.type_name.to_string(),
+                ctx.type_kind,
+                ctx.operation,
+                attr_names,
+            ));
+            Box::pin(async { Ok(PreHookResult::Continue) })
+        }
+        fn after_operation<'a>(
+            &'a self,
+            _ctx: &'a HookContext,
+        ) -> BoxFuture<'a, std::result::Result<(), HookError>> {
+            Box::pin(async { Ok(()) })
+        }
+    }
+
+    let mut manager = EntityManager::<Person>::new(&db);
+    manager.add_hook(Arc::new(ContextCapture {
+        captured: Arc::clone(&captured_ctx),
+    }));
+
+    let mut person = make_person("Alice", 30);
+    manager.insert(&mut person).await.unwrap();
+
+    let captured = captured_ctx.lock().unwrap();
+    let (type_name, type_kind, operation, attrs) = captured.as_ref().unwrap();
+    assert_eq!(type_name, "person");
+    assert_eq!(*type_kind, TypeKind::Entity);
+    assert_eq!(*operation, CrudOperation::Insert);
+    assert!(attrs.contains(&"name".to_string()));
+    assert!(attrs.contains(&"age".to_string()));
+}
+
+// ── Tests: HookRunner isolation ───────────────────────────────────
+
+#[tokio::test]
+async fn hook_runner_has_hooks_returns_false_when_empty() {
+    let runner = HookRunner::new();
+    assert!(!runner.has_hooks());
+}
+
+#[tokio::test]
+async fn hook_runner_has_hooks_returns_true_after_add() {
+    let mut runner = HookRunner::new();
+    let (hook, _) = RecordingHook::new();
+    runner.add_hook(Arc::new(hook));
+    assert!(runner.has_hooks());
+}
+
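One more test in the same style could drive the runner directly instead of going through a manager; a hedged sketch, assuming `build_context` and the run methods behave exactly as the surrounding tests exercise them:

```rust
#[tokio::test]
async fn hook_runner_drives_registered_hooks_directly() {
    let mut runner = HookRunner::new();
    let (hook, calls) = RecordingHook::new();
    runner.add_hook(Arc::new(hook));

    let mut ctx = HookRunner::build_context(
        "person",
        TypeKind::Entity,
        CrudOperation::Insert,
        vec![],
        None,
    );
    runner.run_pre_hooks(&mut ctx).await.unwrap();
    runner.run_post_hooks(&ctx).await;

    // Pre-hook then post-hook, recorded in order.
    let calls = calls.lock().unwrap();
    assert_eq!(calls.len(), 2);
    assert_eq!(calls[0], ("pre:person".to_string(), CrudOperation::Insert));
    assert_eq!(calls[1], ("post:person".to_string(), CrudOperation::Insert));
}
```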
+#[tokio::test]
+async fn hook_runner_pre_hooks_noop_when_empty() {
+    let runner = HookRunner::new();
+    let mut ctx = HookRunner::build_context(
+        "test",
+        TypeKind::Entity,
+        CrudOperation::Insert,
+        vec![],
+        None,
+    );
+    // Should be a no-op, not an error
+    runner.run_pre_hooks(&mut ctx).await.unwrap();
+}
+
+#[tokio::test]
+async fn hook_runner_post_hooks_noop_when_empty() {
+    let runner = HookRunner::new();
+    let ctx = HookRunner::build_context(
+        "test",
+        TypeKind::Entity,
+        CrudOperation::Insert,
+        vec![],
+        None,
+    );
+    // Should be a no-op
+    runner.run_post_hooks(&ctx).await;
+}
diff --git a/type-bridge-core/crates/orm/tests/integration_tests.rs b/type-bridge-core/crates/orm/tests/integration_tests.rs
index e775401..71cf1f9 100644
--- a/type-bridge-core/crates/orm/tests/integration_tests.rs
+++ b/type-bridge-core/crates/orm/tests/integration_tests.rs
@@ -150,3 +150,206 @@ async fn schema_introspection() {
     assert!(live.attributes.contains_key("age"), "expected 'age' attribute");
     assert!(live.entities.contains_key("person"), "expected 'person' entity");
 }
+
+#[tokio::test]
+#[ignore]
+async fn full_relation_lifecycle() {
+    let Some(db) = setup_db().await else { return };
+
+    // Schema sync
+    let mut schema = SchemaManager::new(&db);
+    schema.register_entity::<Person>();
+    schema.register_entity::<Company>();
+    schema.register_relation::<Employment>();
+    schema.sync_schema(true, false).await.expect("schema sync failed");
+
+    // Insert role players
+    let person_mgr = EntityManager::<Person>::new(&db);
+    let company_mgr = EntityManager::<Company>::new(&db);
+
+    let mut alice = Person {
+        iid: None,
+        name: Name("Alice-Rel".into()),
+        age: Age(30),
+    };
+    person_mgr.insert(&mut alice).await.expect("insert person failed");
+
+    let mut acme = Company {
+        iid: None,
+        name: Name("Acme-Rel".into()),
+    };
+    company_mgr.insert(&mut acme).await.expect("insert company failed");
+
+    // Insert relation
+    let rel_mgr = RelationManager::<Employment>::new(&db);
+    let mut employment = Employment {
+        iid: None,
+        employee: RolePlayerRef {
+            role: "employee",
+            entity_type_name: "person",
+            iid: alice.iid().map(String::from),
+            key: None,
+        },
+        employer: RolePlayerRef {
+            role: "employer",
+            entity_type_name: "company",
+            iid: acme.iid().map(String::from),
+            key: None,
+        },
+        position: Some(Position("Engineer".into())),
+    };
+    let rel_iid = rel_mgr.insert(&mut employment).await.expect("insert relation failed");
+    assert!(!rel_iid.is_empty());
+
+    // Fetch all relations
+    let relations = rel_mgr.all().await.expect("all() relations failed");
+    assert!(!relations.is_empty());
+
+    // Delete relation, then entities
+    rel_mgr.delete(&employment).await.expect("delete relation failed");
+    person_mgr.delete(&alice).await.expect("delete person failed");
+    company_mgr.delete(&acme).await.expect("delete company failed");
+}
+
+#[tokio::test]
+#[ignore]
+async fn entity_update_lifecycle() {
+    let Some(db) = setup_db().await else { return };
+
+    let mut schema = SchemaManager::new(&db);
+    schema.register_entity::<Person>();
+    schema.sync_schema(true, false).await.expect("schema sync failed");
+
+    let manager = EntityManager::<Person>::new(&db);
+
+    // Insert
+    let mut person = Person {
+        iid: None,
+        name: Name("UpdateTest".into()),
+        age: Age(25),
+    };
+    manager.insert(&mut person).await.expect("insert failed");
+
+    // Update age
+    person.age = Age(26);
+    manager.update(&person).await.expect("update failed");
+
+    // Fetch and verify
+    let results = manager
+        .get(&[Filter::string_eq("name", "UpdateTest")])
+        .await
+        .expect("get after update failed");
+    assert_eq!(results.len(), 1);
+ assert_eq!(results[0].age.0, 26); + + // Cleanup + manager.delete(&person).await.expect("delete failed"); +} + +#[tokio::test] +#[ignore] +async fn entity_put_creates_and_updates() { + let Some(db) = setup_db().await else { return }; + + let mut schema = SchemaManager::new(&db); + schema.register_entity::(); + schema.sync_schema(true, false).await.expect("schema sync failed"); + + let manager = EntityManager::::new(&db); + + // Put (create) + let mut person = Person { + iid: None, + name: Name("PutTest".into()), + age: Age(30), + }; + let iid1 = manager.put(&mut person).await.expect("put (create) failed"); + assert!(!iid1.is_empty()); + + // Put (update) — same key, different age + let mut person2 = Person { + iid: None, + name: Name("PutTest".into()), + age: Age(31), + }; + let iid2 = manager.put(&mut person2).await.expect("put (update) failed"); + assert!(!iid2.is_empty()); + + // Verify the entity exists with updated age + let results = manager + .get(&[Filter::string_eq("name", "PutTest")]) + .await + .expect("get after put failed"); + assert!(!results.is_empty()); + + // Cleanup + manager.delete(&person2).await.expect("delete failed"); +} + +#[tokio::test] +#[ignore] +async fn query_builder_with_sort_and_limit() { + let Some(db) = setup_db().await else { return }; + + let mut schema = SchemaManager::new(&db); + schema.register_entity::(); + schema.sync_schema(true, false).await.expect("schema sync failed"); + + let manager = EntityManager::::new(&db); + + // Insert 3 people + let mut people = vec![ + Person { iid: None, name: Name("SortA".into()), age: Age(30) }, + Person { iid: None, name: Name("SortB".into()), age: Age(20) }, + Person { iid: None, name: Name("SortC".into()), age: Age(25) }, + ]; + manager.insert_many(&mut people).await.expect("insert_many failed"); + + // Query sorted by age ascending with limit 2 + let results = manager + .query() + .filter(Expr::contains("name", "Sort")) + .order_by("age", SortDir::Asc) + .limit(2) + .execute() + .await + .expect("sorted query failed"); + + assert_eq!(results.len(), 2); + assert!(results[0].age.0 <= results[1].age.0); + + // Cleanup + manager.delete_many(&people).await.expect("delete_many failed"); +} + +#[tokio::test] +#[ignore] +async fn transaction_context_batch_commit() { + let Some(db) = setup_db().await else { return }; + + let mut schema = SchemaManager::new(&db); + schema.register_entity::(); + schema.sync_schema(true, false).await.expect("schema sync failed"); + + let manager = EntityManager::::new(&db); + + // Batch insert via insert_many (uses TransactionContext internally) + let mut people = vec![ + Person { iid: None, name: Name("TxBatch1".into()), age: Age(20) }, + Person { iid: None, name: Name("TxBatch2".into()), age: Age(25) }, + ]; + let iids = manager.insert_many(&mut people).await.expect("insert_many failed"); + assert_eq!(iids.len(), 2); + + // Fetch all and verify using query builder with contains + let results = manager + .query() + .filter(Expr::contains("name", "TxBatch")) + .execute() + .await + .expect("query after batch failed"); + assert_eq!(results.len(), 2); + + // Cleanup + manager.delete_many(&people).await.expect("delete_many failed"); +} diff --git a/type-bridge-core/crates/orm/tests/relation_hooks_tests.rs b/type-bridge-core/crates/orm/tests/relation_hooks_tests.rs new file mode 100644 index 0000000..703b76d --- /dev/null +++ b/type-bridge-core/crates/orm/tests/relation_hooks_tests.rs @@ -0,0 +1,439 @@ +//! Tests for the lifecycle hook system on `RelationManager`. +//! +//! 
Mirrors the patterns in `hooks_tests.rs` but exercises hooks wired into +//! `RelationManager` (insert, delete, insert_many, delete_many). + +mod common; + +use std::sync::{Arc, Mutex}; + +use common::*; +use type_bridge_orm::session::backend::QueryResult; +use type_bridge_orm::*; + +type CapturedContext = Arc)>>>; + +// ── Tests: Pre/post hook execution ────────────────────────────────── + +#[tokio::test] +async fn insert_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![insert_response("0xrel1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + manager.insert(&mut emp).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!( + calls[0], + ("pre:employment".to_string(), CrudOperation::Insert) + ); + assert_eq!( + calls[1], + ("post:employment".to_string(), CrudOperation::Insert) + ); +} + +#[tokio::test] +async fn delete_fires_pre_and_post_hooks() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let emp = make_employment(Some("0xabc"), None, None, None, None, None); + manager.delete(&emp).await.unwrap(); + + let calls = calls.lock().unwrap(); + assert_eq!(calls.len(), 2); + assert_eq!( + calls[0], + ("pre:employment".to_string(), CrudOperation::Delete) + ); + assert_eq!( + calls[1], + ("post:employment".to_string(), CrudOperation::Delete) + ); +} + +// ── Tests: Pre-hook rejection ─────────────────────────────────────── + +#[tokio::test] +async fn pre_hook_rejection_prevents_insert() { + let backend = MockBackend::new(vec![insert_response("0xrel1")]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("validation failed"))); + + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + let result = manager.insert(&mut emp).await; + + assert!(result.is_err()); + match result.unwrap_err() { + OrmError::Hook(HookError::Rejected { reason, .. 
}) => { + assert_eq!(reason, "validation failed"); + } + other => panic!("Expected Hook(Rejected), got: {other}"), + } + + // No query should have been executed + let recorded = queries.lock().unwrap(); + assert!( + recorded.is_empty(), + "Rejected insert should not execute query" + ); +} + +#[tokio::test] +async fn pre_hook_rejection_prevents_delete() { + let backend = MockBackend::new(vec![QueryResult::Ok]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("cannot delete"))); + + let emp = make_employment(Some("0xabc"), None, None, None, None, None); + let result = manager.delete(&emp).await; + + assert!(result.is_err()); + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +// ── Tests: Post-hook errors are non-fatal ─────────────────────────── + +#[tokio::test] +async fn post_hook_error_does_not_propagate() { + let backend = MockBackend::new(vec![insert_response("0xrel1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(FailingPostHook)); + + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + // Should succeed despite the failing post-hook + let iid = manager.insert(&mut emp).await.unwrap(); + assert_eq!(iid, "0xrel1"); +} + +// ── Tests: should_run filtering ───────────────────────────────────── + +#[tokio::test] +async fn should_run_filters_by_operation() { + let backend = MockBackend::new(vec![ + QueryResult::Ok, // for delete + insert_response("0xrel1"), // for insert + ]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = OperationFilterHook::new(CrudOperation::Insert); + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + // Insert should trigger the hook + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + manager.insert(&mut emp).await.unwrap(); + + // Delete should NOT trigger the hook (filter allows only Insert) + let emp_del = make_employment(Some("0xabc"), None, None, None, None, None); + manager.delete(&emp_del).await.unwrap(); + + let calls = calls.lock().unwrap(); + // Only 1 pre-hook call from insert (should_run=false skips delete) + assert_eq!(calls.len(), 1); + assert_eq!(calls[0], CrudOperation::Insert); +} + +// ── Tests: Multiple hooks ─────────────────────────────────────────── + +#[tokio::test] +async fn multiple_hooks_run_in_order() { + let backend = MockBackend::new(vec![insert_response("0xrel1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook1, calls1) = RecordingHook::new(); + let (hook2, calls2) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(hook1)); + manager.add_hook(Arc::new(hook2)); + + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + manager.insert(&mut emp).await.unwrap(); + + // Both hooks should have been called + let c1 = calls1.lock().unwrap(); + let c2 = calls2.lock().unwrap(); + 
assert_eq!(c1.len(), 2); // pre + post + assert_eq!(c2.len(), 2); // pre + post +} + +#[tokio::test] +async fn rejection_short_circuits_subsequent_hooks() { + let backend = MockBackend::new(vec![insert_response("0xrel1")]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (recorder, calls) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + // Rejecting hook first, then recording hook + manager.add_hook(Arc::new(RejectingHook::new("rejected"))); + manager.add_hook(Arc::new(recorder)); + + let mut emp = make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ); + let result = manager.insert(&mut emp).await; + + assert!(result.is_err()); + // Recording hook should not have been called at all + let calls = calls.lock().unwrap(); + assert!(calls.is_empty()); +} + +// ── Tests: Batch operations ───────────────────────────────────────── + +#[tokio::test] +async fn insert_many_fires_hooks_per_relation() { + let backend = MockBackend::new(vec![ + insert_response("0xbr2"), + insert_response("0xbr1"), + ]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(hook)); + + let mut relations = vec![ + make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ), + make_employment( + None, + None, + Some(("name", AttributeValue::String("Bob".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Manager"), + ), + ]; + manager.insert_many(&mut relations).await.unwrap(); + + let calls = calls.lock().unwrap(); + // 2 pre-hooks + 2 post-hooks = 4 total + assert_eq!(calls.len(), 4); + // Pre-hooks first (all before DB ops) + assert_eq!(calls[0].1, CrudOperation::Insert); + assert_eq!(calls[1].1, CrudOperation::Insert); + // Then post-hooks (all after commit) + assert_eq!(calls[2].1, CrudOperation::Insert); + assert_eq!(calls[3].1, CrudOperation::Insert); +} + +#[tokio::test] +async fn insert_many_rejection_aborts_entire_batch() { + let backend = MockBackend::new(vec![ + insert_response("0xbr2"), + insert_response("0xbr1"), + ]); + let queries = Arc::clone(&backend.queries); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let mut manager = RelationManager::::new(&db); + manager.add_hook(Arc::new(RejectingHook::new("batch rejected"))); + + let mut relations = vec![ + make_employment( + None, + None, + Some(("name", AttributeValue::String("Alice".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Engineer"), + ), + make_employment( + None, + None, + Some(("name", AttributeValue::String("Bob".into()))), + None, + Some(("name", AttributeValue::String("Acme".into()))), + Some("Manager"), + ), + ]; + let result = manager.insert_many(&mut relations).await; + + assert!(result.is_err()); + // No queries should have been executed + let recorded = queries.lock().unwrap(); + assert!(recorded.is_empty()); +} + +#[tokio::test] +async fn delete_many_fires_hooks_per_relation() { + let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok]); + let db = Database::with_backend(Box::new(backend), "testdb"); + + let (hook, calls) = RecordingHook::new(); + let mut manager = RelationManager::::new(&db); + 
+    manager.add_hook(Arc::new(hook));
+
+    let relations = vec![
+        make_employment(Some("0xabc"), None, None, None, None, None),
+        make_employment(Some("0xdef"), None, None, None, None, None),
+    ];
+    manager.delete_many(&relations).await.unwrap();
+
+    let calls = calls.lock().unwrap();
+    assert_eq!(calls.len(), 4); // 2 pre + 2 post
+    assert_eq!(calls[0].1, CrudOperation::Delete);
+    assert_eq!(calls[1].1, CrudOperation::Delete);
+    assert_eq!(calls[2].1, CrudOperation::Delete);
+    assert_eq!(calls[3].1, CrudOperation::Delete);
+}
+
+// ── Tests: HookContext attributes ───────────────────────────────────
+
+#[tokio::test]
+async fn pre_hook_receives_correct_context() {
+    let backend = MockBackend::new(vec![insert_response("0xrel1")]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let captured_ctx: CapturedContext = Arc::new(Mutex::new(None));
+
+    struct ContextCapture {
+        captured: CapturedContext,
+    }
+    impl LifecycleHook for ContextCapture {
+        fn name(&self) -> &str {
+            "ctx-capture"
+        }
+        fn before_operation<'a>(
+            &'a self,
+            ctx: &'a mut HookContext,
+        ) -> type_bridge_orm::session::backend::BoxFuture<
+            'a,
+            std::result::Result<PreHookResult, HookError>,
+        > {
+            let attr_names: Vec<String> = ctx
+                .attributes
+                .iter()
+                .map(|(name, _)| name.to_string())
+                .collect();
+            *self.captured.lock().unwrap() = Some((
+                ctx.type_name.to_string(),
+                ctx.type_kind,
+                ctx.operation,
+                attr_names,
+            ));
+            Box::pin(async { Ok(PreHookResult::Continue) })
+        }
+        fn after_operation<'a>(
+            &'a self,
+            _ctx: &'a HookContext,
+        ) -> type_bridge_orm::session::backend::BoxFuture<
+            'a,
+            std::result::Result<(), HookError>,
+        > {
+            Box::pin(async { Ok(()) })
+        }
+    }
+
+    let mut manager = RelationManager::<Employment>::new(&db);
+    manager.add_hook(Arc::new(ContextCapture {
+        captured: Arc::clone(&captured_ctx),
+    }));
+
+    let mut emp = make_employment(
+        None,
+        None,
+        Some(("name", AttributeValue::String("Alice".into()))),
+        None,
+        Some(("name", AttributeValue::String("Acme".into()))),
+        Some("Engineer"),
+    );
+    manager.insert(&mut emp).await.unwrap();
+
+    let captured = captured_ctx.lock().unwrap();
+    let (type_name, type_kind, operation, attrs) = captured.as_ref().unwrap();
+    assert_eq!(type_name, "employment");
+    assert_eq!(*type_kind, TypeKind::Relation);
+    assert_eq!(*operation, CrudOperation::Insert);
+    assert!(attrs.contains(&"position".to_string()));
+}
+
+// ── Tests: Zero-overhead guard ──────────────────────────────────────
+
+#[tokio::test]
+async fn no_hooks_means_no_overhead() {
+    let backend = MockBackend::new(vec![insert_response("0xrel1")]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    // Manager with NO hooks registered
+    let manager = RelationManager::<Employment>::new(&db);
+    let mut emp = make_employment(
+        None,
+        None,
+        Some(("name", AttributeValue::String("Alice".into()))),
+        None,
+        Some(("name", AttributeValue::String("Acme".into()))),
+        Some("Engineer"),
+    );
+    let iid = manager.insert(&mut emp).await.unwrap();
+
+    assert_eq!(iid, "0xrel1");
+    assert_eq!(emp.iid(), Some("0xrel1"));
+}
diff --git a/type-bridge-core/crates/orm/tests/relation_manager_tests.rs b/type-bridge-core/crates/orm/tests/relation_manager_tests.rs
index 262e031..b19c384 100644
--- a/type-bridge-core/crates/orm/tests/relation_manager_tests.rs
+++ b/type-bridge-core/crates/orm/tests/relation_manager_tests.rs
@@ -1,182 +1,12 @@
 //! Integration tests for `RelationManager` with a mock backend.
-use std::sync::{Arc, Mutex};
+mod common;
-use type_bridge_orm::*;
-
-// ── Attribute types ─────────────────────────────────────────────────
-
-define_attribute!(Name, "name", "string");
-define_attribute!(Position, "position", "string");
-
-// ── Relation type ───────────────────────────────────────────────────
-
-#[derive(Debug)]
-struct Employment {
-    iid: Option<String>,
-    employee: RolePlayerRef,
-    employer: RolePlayerRef,
-    position: Option<String>,
-}
-
-impl TypeBridgeRelation for Employment {
-    const TYPE_NAME: &'static str = "employment";
-
-    fn owned_attributes() -> &'static [OwnedAttributeInfo] {
-        static ATTRS: [OwnedAttributeInfo; 1] = [OwnedAttributeInfo {
-            attr_name: "position",
-            value_type: ValueType::String,
-            annotations: &[],
-        }];
-        &ATTRS
-    }
-
-    fn role_info() -> &'static [RoleInfo] {
-        static ROLES: [RoleInfo; 2] = [
-            RoleInfo {
-                role_name: "employee",
-                player_type_name: "person",
-            },
-            RoleInfo {
-                role_name: "employer",
-                player_type_name: "company",
-            },
-        ];
-        &ROLES
-    }
-
-    fn iid(&self) -> Option<&str> {
-        self.iid.as_deref()
-    }
-
-    fn set_iid(&mut self, iid: String) {
-        self.iid = Some(iid);
-    }
-
-    fn to_attribute_values(&self) -> Vec<(&'static str, AttributeValue)> {
-        let mut values = Vec::new();
-        if let Some(ref pos) = self.position {
-            values.push(("position", AttributeValue::String(pos.clone())));
-        }
-        values
-    }
-
-    fn to_role_player_refs(&self) -> Vec<RolePlayerRef> {
-        vec![self.employee.clone(), self.employer.clone()]
-    }
-
-    fn from_document(doc: &serde_json::Map<String, serde_json::Value>) -> Result<Self> {
-        let position = doc.get("position").and_then(|v| v.as_str()).map(String::from);
-        Ok(Self {
-            iid: None,
-            employee: RolePlayerRef {
-                role: "employee",
-                entity_type_name: "person",
-                iid: None,
-                key: None,
-            },
-            employer: RolePlayerRef {
-                role: "employer",
-                entity_type_name: "company",
-                iid: None,
-                key: None,
-            },
-            position,
-        })
-    }
-}
+use std::sync::Arc;
-// ── Mock backend (same pattern as entity_manager_tests) ─────────────
-
-use type_bridge_orm::session::backend::{BoxFuture, DriverBackend, QueryResult, TransactionOps};
-
-struct MockBackend {
-    responses: Arc<Mutex<Vec<QueryResult>>>,
-    queries: Arc<Mutex<Vec<String>>>,
-}
-
-impl MockBackend {
-    fn new(responses: Vec<QueryResult>) -> Self {
-        Self {
-            responses: Arc::new(Mutex::new(responses)),
-            queries: Arc::new(Mutex::new(Vec::new())),
-        }
-    }
-}
-
-impl DriverBackend for MockBackend {
-    fn open_transaction(
-        &self,
-        _database: &str,
-        _tx_type: TxType,
-    ) -> BoxFuture<'_, std::result::Result<Box<dyn TransactionOps>, OrmError>> {
-        let responses = Arc::clone(&self.responses);
-        let queries = Arc::clone(&self.queries);
-        Box::pin(async move {
-            Ok(Box::new(MockTransaction {
-                responses,
-                queries,
-            }) as Box<dyn TransactionOps>)
-        })
-    }
-
-    fn is_open(&self) -> bool {
-        true
-    }
-}
-
-struct MockTransaction {
-    responses: Arc<Mutex<Vec<QueryResult>>>,
-    queries: Arc<Mutex<Vec<String>>>,
-}
-
-impl TransactionOps for MockTransaction {
-    fn query(
-        &mut self,
-        typeql: &str,
-    ) -> BoxFuture<'_, std::result::Result<QueryResult, OrmError>> {
-        self.queries.lock().unwrap().push(typeql.to_string());
-        let result = self
-            .responses
-            .lock()
-            .unwrap()
-            .pop()
-            .unwrap_or(QueryResult::Ok);
-        Box::pin(async move { Ok(result) })
-    }
-
-    fn commit(&mut self) -> BoxFuture<'_, std::result::Result<(), OrmError>> {
-        Box::pin(async { Ok(()) })
-    }
-}
-
-// ── Tests ───────────────────────────────────────────────────────────
-
-fn make_employment(
-    iid: Option<&str>,
-    emp_iid: Option<&str>,
-    emp_key: Option<(&'static str, AttributeValue)>,
-    er_iid: Option<&str>,
-    er_key: Option<(&'static str, AttributeValue)>,
-    position: Option<&str>,
-) -> Employment {
-    Employment {
-        iid: iid.map(String::from),
-        employee: RolePlayerRef {
-            role: "employee",
-            entity_type_name: "person",
-            iid: emp_iid.map(String::from),
-            key: emp_key,
-        },
-        employer: RolePlayerRef {
-            role: "employer",
-            entity_type_name: "company",
-            iid: er_iid.map(String::from),
-            key: er_key,
-        },
-        position: position.map(String::from),
-    }
-}
+use common::*;
+use type_bridge_orm::session::backend::QueryResult;
+use type_bridge_orm::*;
 
 #[tokio::test]
 async fn insert_sets_iid_and_returns_it() {
diff --git a/type-bridge-core/crates/orm/tests/transaction_context_tests.rs b/type-bridge-core/crates/orm/tests/transaction_context_tests.rs
new file mode 100644
index 0000000..fe6a311
--- /dev/null
+++ b/type-bridge-core/crates/orm/tests/transaction_context_tests.rs
@@ -0,0 +1,201 @@
+//! Integration tests for `TransactionContext`.
+
+mod common;
+
+use std::sync::Arc;
+
+use common::*;
+use type_bridge_orm::session::backend::QueryResult;
+use type_bridge_orm::*;
+
+// ── Basic TransactionContext tests ──────────────────────────────────
+
+#[tokio::test]
+async fn query_executes_on_shared_transaction() {
+    let backend = MockBackend::new(vec![QueryResult::Documents(vec![
+        serde_json::json!({"name": "Alice", "age": 30}),
+    ])]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let tx = db.transaction_context(TxType::Write).await.unwrap();
+    let result = tx.query("match $p isa person; fetch $p: name, age;").await.unwrap();
+
+    // Verify the query was recorded.
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 1);
+    assert!(recorded[0].contains("match"));
+
+    // Verify we got the expected result back.
+    match result {
+        QueryResult::Documents(docs) => {
+            assert_eq!(docs.len(), 1);
+            assert_eq!(docs[0]["name"], "Alice");
+        }
+        other => panic!("Expected Documents, got: {other:?}"),
+    }
+}
+
+#[tokio::test]
+async fn commit_succeeds() {
+    let backend = MockBackend::new(vec![]);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let tx = db.transaction_context(TxType::Write).await.unwrap();
+    let result = tx.commit().await;
+
+    assert!(result.is_ok());
+}
+
+#[tokio::test]
+async fn tx_type_returns_configured_type() {
+    // Test Read type.
+    let backend_read = MockBackend::new(vec![]);
+    let db_read = Database::with_backend(Box::new(backend_read), "testdb");
+    let tx_read = db_read.transaction_context(TxType::Read).await.unwrap();
+    assert_eq!(tx_read.tx_type(), TxType::Read);
+
+    // Test Write type.
+    let backend_write = MockBackend::new(vec![]);
+    let db_write = Database::with_backend(Box::new(backend_write), "testdb");
+    let tx_write = db_write.transaction_context(TxType::Write).await.unwrap();
+    assert_eq!(tx_write.tx_type(), TxType::Write);
+}
+
+#[tokio::test]
+async fn clone_shares_same_transaction() {
+    // Two queries: second popped first (LIFO).
+    let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let tx = db.transaction_context(TxType::Write).await.unwrap();
+    let tx_clone = tx.clone();
+
+    // Query on the original.
+    tx.query("insert $p isa person, has name 'Alice';")
+        .await
+        .unwrap();
+
+    // Query on the clone -- should go to the same underlying transaction.
+    tx_clone
+        .query("insert $p isa person, has name 'Bob';")
+        .await
+        .unwrap();
+
+    // Both queries should be recorded in the shared queries vec.
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 2);
+    assert!(recorded[0].contains("Alice"));
+    assert!(recorded[1].contains("Bob"));
+}
+
+#[tokio::test]
+async fn multiple_queries_in_sequence() {
+    let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok, QueryResult::Ok]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let tx = db.transaction_context(TxType::Write).await.unwrap();
+
+    tx.query("insert $a isa person, has name 'Alice';")
+        .await
+        .unwrap();
+    tx.query("insert $b isa person, has name 'Bob';")
+        .await
+        .unwrap();
+    tx.query("insert $c isa person, has name 'Carol';")
+        .await
+        .unwrap();
+
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 3);
+    assert!(recorded[0].contains("Alice"));
+    assert!(recorded[1].contains("Bob"));
+    assert!(recorded[2].contains("Carol"));
+}
+
+#[tokio::test]
+async fn query_and_commit_sequence() {
+    let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let tx = db.transaction_context(TxType::Write).await.unwrap();
+
+    tx.query("insert $a isa person, has name 'Alice';")
+        .await
+        .unwrap();
+    tx.query("insert $b isa person, has name 'Bob';")
+        .await
+        .unwrap();
+
+    // Commit should succeed after queries.
+    let commit_result = tx.commit().await;
+    assert!(commit_result.is_ok());
+
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 2);
+}
+
+// ── Batch operation tests using TransactionContext internally ───────
+
+#[tokio::test]
+async fn batch_insert_many_uses_transaction_context() {
+    // insert_many opens a TransactionContext internally and runs one query per entity.
+    // Responses are popped LIFO, so push in reverse order.
+    let backend = MockBackend::new(vec![
+        insert_response("0x003"),
+        insert_response("0x002"),
+        insert_response("0x001"),
+    ]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let mut entities = vec![
+        make_person("Alice", 30),
+        make_person("Bob", 25),
+        make_person("Carol", 35),
+    ];
+
+    let manager = EntityManager::<Person>::new(&db);
+    let iids = manager.insert_many(&mut entities).await.unwrap();
+
+    assert_eq!(iids.len(), 3);
+    assert_eq!(iids[0], "0x001");
+    assert_eq!(iids[1], "0x002");
+    assert_eq!(iids[2], "0x003");
+
+    // All 3 insert queries should have been routed through the same
+    // shared queries vec (i.e., the same underlying transaction).
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 3);
+    assert!(recorded[0].contains("insert"));
+    assert!(recorded[1].contains("insert"));
+    assert!(recorded[2].contains("insert"));
+}
+
+#[tokio::test]
+async fn batch_delete_many_uses_transaction_context() {
+    // delete_many opens a TransactionContext internally and runs one query per entity.
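+    // (Responses are popped LIFO here as well, but the three QueryResult::Ok
+    // responses are interchangeable, so ordering does not matter for deletes.)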
+    let backend = MockBackend::new(vec![QueryResult::Ok, QueryResult::Ok, QueryResult::Ok]);
+    let queries = Arc::clone(&backend.queries);
+    let db = Database::with_backend(Box::new(backend), "testdb");
+
+    let entities = vec![
+        make_person_with_iid("Alice", 30, "0x001"),
+        make_person_with_iid("Bob", 25, "0x002"),
+        make_person_with_iid("Carol", 35, "0x003"),
+    ];
+
+    let manager = EntityManager::<Person>::new(&db);
+    manager.delete_many(&entities).await.unwrap();
+
+    // All 3 delete queries should have been routed through the shared
+    // queries vec (same underlying transaction via TransactionContext).
+    let recorded = queries.lock().unwrap();
+    assert_eq!(recorded.len(), 3);
+    assert!(recorded[0].contains("delete"));
+    assert!(recorded[1].contains("delete"));
+    assert!(recorded[2].contains("delete"));
+}
diff --git a/type-bridge-core/crates/python/Cargo.toml b/type-bridge-core/crates/python/Cargo.toml
index 0ebfbb5..49841dc 100644
--- a/type-bridge-core/crates/python/Cargo.toml
+++ b/type-bridge-core/crates/python/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "type-bridge-core"
-version = "1.4.0"
+version = "1.4.1"
 edition = "2024"
 description = "PyO3 bindings exposing the type-bridge Rust core to Python"
 license.workspace = true
@@ -12,7 +12,7 @@ name = "type_bridge_core"
 crate-type = ["cdylib", "rlib"]
 
 [dependencies]
-type-bridge-core-lib = { path = "../core", version = "1.4.0" }
+type-bridge-core-lib = { path = "../core", version = "1.4.1" }
 pyo3 = { version = "0.23", features = ["extension-module"] }
 pythonize = "0.23"
 serde = { version = "1.0", features = ["derive"] }
diff --git a/type-bridge-core/crates/server/Cargo.toml b/type-bridge-core/crates/server/Cargo.toml
index 8521db1..6946ca0 100644
--- a/type-bridge-core/crates/server/Cargo.toml
+++ b/type-bridge-core/crates/server/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "type-bridge-server"
-version = "1.4.0"
+version = "1.4.1"
 edition = "2024"
 description = "Query-intercepting proxy server for TypeDB with validation and audit logging"
 license.workspace = true
@@ -20,9 +20,10 @@ required-features = ["typedb", "axum-transport"]
 default = ["typedb", "axum-transport"]
 typedb = ["dep:typedb-driver", "dep:futures"]
 axum-transport = ["dep:axum", "dep:tower-http"]
+test-helpers = []
 
 [dependencies]
-type-bridge-core-lib = { path = "../core", version = "1.4.0" }
+type-bridge-core-lib = { path = "../core", version = "1.4.1" }
 tokio = { version = "1", features = ["full"] }
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -42,14 +43,10 @@ axum = { version = "0.8", optional = true }
 tower-http = { version = "0.6", features = ["cors", "trace"], optional = true }
 
 [dev-dependencies]
+type-bridge-server = { path = ".", features = ["test-helpers", "axum-transport"] }
 tempfile = "3"
 http-body-util = "0.1"
 tower = { version = "0.5", features = ["util"] }
-criterion = { version = "0.5", features = ["html_reports"] }
-
-[[bench]]
-name = "crud_builder"
-harness = false
 
 [lints]
 workspace = true
diff --git a/type-bridge-core/crates/server/benches/crud_builder.rs b/type-bridge-core/crates/server/benches/crud_builder.rs
deleted file mode 100644
index a967468..0000000
--- a/type-bridge-core/crates/server/benches/crud_builder.rs
+++ /dev/null
@@ -1,271 +0,0 @@
-use std::collections::HashMap;
-
-use criterion::{black_box, criterion_group, criterion_main, Criterion};
-
-use type_bridge_core_lib::schema::TypeSchema;
-use type_bridge_server::crud::builder;
-use type_bridge_server::crud::types::*;
-
-fn test_schema() -> TypeSchema {
-    TypeSchema::from_typeql(
-        r#"
-        define
-        attribute name, value string;
-        attribute age, value long;
-        attribute email, value string;
-        attribute salary, value double;
-        attribute active, value boolean;
-        attribute start-date, value date;
-        entity person,
-            owns name @key,
-            owns age,
-            owns email,
-            owns salary,
-            owns active;
-        entity company,
-            owns name @key;
-        relation employment,
-            relates employee,
-            relates employer,
-            owns start-date;
-        "#,
-    )
-    .unwrap()
-}
-
-fn make_attrs(count: usize) -> HashMap<String, AttributeValueSpec> {
-    let attr_defs = [
-        ("name", serde_json::json!("Alice"), "string"),
-        ("age", serde_json::json!(30), "long"),
-        ("email", serde_json::json!("alice@example.com"), "string"),
-        ("salary", serde_json::json!(75000.0), "double"),
-        ("active", serde_json::json!(true), "boolean"),
-    ];
-    let mut map = HashMap::new();
-    for (name, value, vtype) in attr_defs.iter().take(count) {
-        map.insert(
-            name.to_string(),
-            AttributeValueSpec {
-                value: value.clone(),
-                value_type: vtype.to_string(),
-            },
-        );
-    }
-    map
-}
-
-fn bench_entity_insert(c: &mut Criterion) {
-    let schema = test_schema();
-    let mut group = c.benchmark_group("crud/entity_insert");
-
-    let attrs_3 = make_attrs(3);
-    group.bench_function("3_attrs", |b| {
-        b.iter(|| {
-            builder::build_entity_insert(
-                black_box("person"),
-                black_box(&attrs_3),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    let attrs_5 = make_attrs(5);
-    group.bench_function("5_attrs", |b| {
-        b.iter(|| {
-            builder::build_entity_insert(
-                black_box("person"),
-                black_box(&attrs_5),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    group.finish();
-}
-
-fn bench_entity_fetch(c: &mut Criterion) {
-    let schema = test_schema();
-    let mut group = c.benchmark_group("crud/entity_fetch");
-
-    group.bench_function("no_filters", |b| {
-        b.iter(|| {
-            builder::build_entity_fetch(
-                black_box("person"),
-                black_box(&[]),
-                black_box(&[]),
-                black_box(None),
-                black_box(None),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    let filters = vec![
-        FilterSpec {
-            attr: "age".to_string(),
-            op: ">=".to_string(),
-            value: AttributeValueSpec {
-                value: serde_json::json!(18),
-                value_type: "long".to_string(),
-            },
-        },
-        FilterSpec {
-            attr: "active".to_string(),
-            op: "==".to_string(),
-            value: AttributeValueSpec {
-                value: serde_json::json!(true),
-                value_type: "boolean".to_string(),
-            },
-        },
-        FilterSpec {
-            attr: "name".to_string(),
-            op: "!=".to_string(),
-            value: AttributeValueSpec {
-                value: serde_json::json!("deleted"),
-                value_type: "string".to_string(),
-            },
-        },
-    ];
-
-    group.bench_function("3_filters", |b| {
-        b.iter(|| {
-            builder::build_entity_fetch(
-                black_box("person"),
-                black_box(&filters),
-                black_box(&[]),
-                black_box(Some(10)),
-                black_box(Some(0)),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    group.finish();
-}
-
-fn bench_entity_fetch_by_iid(c: &mut Criterion) {
-    let schema = test_schema();
-    c.bench_function("crud/entity_fetch_by_iid", |b| {
-        b.iter(|| {
-            builder::build_entity_fetch_by_iid(
-                black_box("person"),
-                black_box("0xabcdef1234567890"),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-}
-
-fn bench_entity_update(c: &mut Criterion) {
-    let schema = test_schema();
-    let attrs = make_attrs(3);
-    c.bench_function("crud/entity_update", |b| {
-        b.iter(|| {
-            builder::build_entity_update_by_iid(
-                black_box("person"),
-                black_box("0xabc123"),
-                black_box(&attrs),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-}
-
-fn bench_relation_insert(c: &mut Criterion) {
-    let schema = test_schema();
-    let mut group = c.benchmark_group("crud/relation_insert");
-
-    let role_players_2 = vec![
-        RolePlayerSpec {
-            role: "employee".to_string(),
-            entity_type: "person".to_string(),
-            iid: None,
-            key_attr: Some("name".to_string()),
-            key_value: Some(AttributeValueSpec {
-                value: serde_json::json!("Alice"),
-                value_type: "string".to_string(),
-            }),
-        },
-        RolePlayerSpec {
-            role: "employer".to_string(),
-            entity_type: "company".to_string(),
-            iid: None,
-            key_attr: Some("name".to_string()),
-            key_value: Some(AttributeValueSpec {
-                value: serde_json::json!("Acme"),
-                value_type: "string".to_string(),
-            }),
-        },
-    ];
-    let attrs = HashMap::new();
-
-    group.bench_function("2_role_players", |b| {
-        b.iter(|| {
-            builder::build_relation_insert(
-                black_box("employment"),
-                black_box(&role_players_2),
-                black_box(&attrs),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    let mut attrs_with_date = HashMap::new();
-    attrs_with_date.insert(
-        "start-date".to_string(),
-        AttributeValueSpec {
-            value: serde_json::json!("2024-01-01"),
-            value_type: "date".to_string(),
-        },
-    );
-
-    group.bench_function("2_role_players_with_attrs", |b| {
-        b.iter(|| {
-            builder::build_relation_insert(
-                black_box("employment"),
-                black_box(&role_players_2),
-                black_box(&attrs_with_date),
-                black_box(&schema),
-            )
-            .unwrap()
-        })
-    });
-
-    group.finish();
-}
-
-fn bench_batch_inserts(c: &mut Criterion) {
-    let schema = test_schema();
-    let attrs = make_attrs(3);
-
-    c.bench_function("crud/batch_10_entity_inserts", |b| {
-        b.iter(|| {
-            for _ in 0..10 {
-                builder::build_entity_insert(
-                    black_box("person"),
-                    black_box(&attrs),
-                    black_box(&schema),
-                )
-                .unwrap();
-            }
-        })
-    });
-}
-
-criterion_group!(
-    benches,
-    bench_entity_insert,
-    bench_entity_fetch,
-    bench_entity_fetch_by_iid,
-    bench_entity_update,
-    bench_relation_insert,
-    bench_batch_inserts,
-);
-criterion_main!(benches);
diff --git a/type-bridge-core/crates/server/src/crud/builder.rs b/type-bridge-core/crates/server/src/crud/builder.rs
deleted file mode 100644
index 9589a6b..0000000
--- a/type-bridge-core/crates/server/src/crud/builder.rs
+++ /dev/null
@@ -1,847 +0,0 @@
-//! Dynamic query builder: converts CRUD requests into AST clauses.
-//!
-//! Uses [`type_bridge_core_lib::ast`] and [`TypeSchema`] to build
-//! validated, schema-aware queries without depending on the ORM crate.
-
-use std::collections::HashMap;
-
-use type_bridge_core_lib::ast::*;
-use type_bridge_core_lib::schema::TypeSchema;
-
-use crate::error::PipelineError;
-
-use super::types::*;
-
-/// Convert a JSON value + value_type into an AST [`Value::Literal`].
-fn to_literal(spec: &AttributeValueSpec) -> Value {
-    Value::Literal(LiteralValue {
-        value: spec.value.clone(),
-        value_type: spec.value_type.clone(),
-    })
-}
-
-/// Validate that `type_name` exists as an entity in the schema.
-fn require_entity(schema: &TypeSchema, type_name: &str) -> Result<(), PipelineError> {
-    if schema.get_entity(type_name).is_none() {
-        return Err(PipelineError::Validation(format!(
-            "Unknown entity type: '{type_name}'"
-        )));
-    }
-    Ok(())
-}
-
-/// Validate that `type_name` exists as a relation in the schema.
-fn require_relation(schema: &TypeSchema, type_name: &str) -> Result<(), PipelineError> {
-    if schema.get_relation(type_name).is_none() {
-        return Err(PipelineError::Validation(format!(
-            "Unknown relation type: '{type_name}'"
-        )));
-    }
-    Ok(())
-}
-
-/// Validate that `attr_name` is owned by `type_name` (entity or relation).
-fn require_owned_attribute(
-    schema: &TypeSchema,
-    type_name: &str,
-    attr_name: &str,
-) -> Result<(), PipelineError> {
-    let owned = schema.get_all_owned_attributes(type_name);
-    if !owned.iter().any(|a| a.name == attr_name) {
-        return Err(PipelineError::Validation(format!(
-            "Type '{type_name}' does not own attribute '{attr_name}'"
-        )));
-    }
-    Ok(())
-}
-
-/// Build INSERT clauses for an entity.
-///
-/// Produces:
-/// ```text
-/// insert
-/// $e isa <type>,
-///     has <attr1> <value1>,
-///     has <attr2> <value2>;
-/// ```
-pub fn build_entity_insert(
-    type_name: &str,
-    attributes: &HashMap<String, AttributeValueSpec>,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_entity(schema, type_name)?;
-
-    for attr_name in attributes.keys() {
-        require_owned_attribute(schema, type_name, attr_name)?;
-    }
-
-    let mut statements = vec![Statement::Isa {
-        variable: "$e".to_string(),
-        type_name: type_name.to_string(),
-    }];
-
-    for (attr_name, spec) in attributes {
-        statements.push(Statement::Has {
-            subject_var: "$e".to_string(),
-            attr_name: attr_name.clone(),
-            value: to_literal(spec),
-        });
-    }
-
-    Ok(vec![Clause::Insert(statements)])
-}
-
-/// Build MATCH + FETCH clauses for listing entities.
-///
-/// Produces:
-/// ```text
-/// match
-/// $e isa <type>;
-/// [filter constraints...]
-/// fetch
-/// $e: *;
-/// [sort/limit/offset]
-/// ```
-pub fn build_entity_fetch(
-    type_name: &str,
-    filters: &[FilterSpec],
-    sort: &[SortSpec],
-    limit: Option<usize>,
-    offset: Option<usize>,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_entity(schema, type_name)?;
-
-    let mut patterns: Vec<Pattern> = vec![Pattern::Entity {
-        variable: "$e".to_string(),
-        type_name: type_name.to_string(),
-        constraints: vec![],
-        is_strict: false,
-    }];
-
-    // Add filter constraints as Has patterns + ValueComparison
-    for (i, f) in filters.iter().enumerate() {
-        require_owned_attribute(schema, type_name, &f.attr)?;
-        let attr_var = format!("$_attr_{i}");
-        patterns.push(Pattern::Has {
-            thing_var: "$e".to_string(),
-            attr_type: f.attr.clone(),
-            attr_var: attr_var.clone(),
-        });
-        patterns.push(Pattern::ValueComparison {
-            var: attr_var,
-            operator: f.op.clone(),
-            value: to_literal(&f.value),
-        });
-    }
-
-    let mut clauses = vec![
-        Clause::Match(patterns),
-        Clause::Fetch(vec![FetchItem::Wildcard {
-            key: "$e".to_string(),
-            var: "$e".to_string(),
-        }]),
-    ];
-
-    // Sort
-    if !sort.is_empty() {
-        let sort_fields: Vec<SortField> = sort
-            .iter()
-            .map(|s| SortField {
-                variable: format!("$_sort_{}", s.attr),
-                ascending: s.dir != "desc",
-            })
-            .collect();
-        clauses.push(Clause::Sort(sort_fields));
-    }
-
-    if let Some(n) = limit {
-        clauses.push(Clause::Limit(n));
-    }
-    if let Some(n) = offset {
-        clauses.push(Clause::Offset(n));
-    }
-
-    Ok(clauses)
-}
-
-/// Build MATCH + FETCH clauses for fetching a single entity by IID.
-pub fn build_entity_fetch_by_iid(
-    type_name: &str,
-    iid: &str,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_entity(schema, type_name)?;
-
-    let patterns = vec![Pattern::Entity {
-        variable: "$e".to_string(),
-        type_name: type_name.to_string(),
-        constraints: vec![Constraint::Iid(iid.to_string())],
-        is_strict: false,
-    }];
-
-    Ok(vec![
-        Clause::Match(patterns),
-        Clause::Fetch(vec![FetchItem::Wildcard {
-            key: "$e".to_string(),
-            var: "$e".to_string(),
-        }]),
-    ])
-}
-
-/// Build MATCH + DELETE clauses for deleting an entity by IID.
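-///
-/// Produces roughly the following (an illustrative sketch; the exact
-/// formatting is decided by `QueryCompiler`):
-/// ```text
-/// match
-/// $e isa <type>, iid <iid>;
-/// delete
-/// $e;
-/// ```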
-pub fn build_entity_delete_by_iid(
-    type_name: &str,
-    iid: &str,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_entity(schema, type_name)?;
-
-    let patterns = vec![Pattern::Entity {
-        variable: "$e".to_string(),
-        type_name: type_name.to_string(),
-        constraints: vec![Constraint::Iid(iid.to_string())],
-        is_strict: false,
-    }];
-
-    Ok(vec![
-        Clause::Match(patterns),
-        Clause::Delete(vec![Statement::DeleteThing("$e".to_string())]),
-    ])
-}
-
-/// Build MATCH + DELETE old attrs + INSERT new attrs for updating an entity by IID.
-///
-/// Produces:
-/// ```text
-/// match
-/// $e isa <type>, iid <iid>;
-/// delete
-/// $e has $old_attr1;
-/// insert
-/// $e has <attr1> <new-value1>;
-/// ```
-pub fn build_entity_update_by_iid(
-    type_name: &str,
-    iid: &str,
-    attributes: &HashMap<String, AttributeValueSpec>,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_entity(schema, type_name)?;
-
-    for attr_name in attributes.keys() {
-        require_owned_attribute(schema, type_name, attr_name)?;
-    }
-
-    // Match the entity
-    let mut patterns: Vec<Pattern> = vec![Pattern::Entity {
-        variable: "$e".to_string(),
-        type_name: type_name.to_string(),
-        constraints: vec![Constraint::Iid(iid.to_string())],
-        is_strict: false,
-    }];
-
-    // For each attribute being updated, match the old value
-    let mut delete_stmts = Vec::new();
-    let mut insert_stmts = Vec::new();
-
-    for (i, (attr_name, spec)) in attributes.iter().enumerate() {
-        let old_var = format!("$_old_{i}");
-        patterns.push(Pattern::Has {
-            thing_var: "$e".to_string(),
-            attr_type: attr_name.clone(),
-            attr_var: old_var.clone(),
-        });
-        delete_stmts.push(Statement::Has {
-            subject_var: "$e".to_string(),
-            attr_name: attr_name.clone(),
-            value: Value::Variable(old_var),
-        });
-        insert_stmts.push(Statement::Has {
-            subject_var: "$e".to_string(),
-            attr_name: attr_name.clone(),
-            value: to_literal(spec),
-        });
-    }
-
-    Ok(vec![
-        Clause::Match(patterns),
-        Clause::Delete(delete_stmts),
-        Clause::Insert(insert_stmts),
-    ])
-}
-
-/// Build clauses for inserting a relation with role players.
-///
-/// First matches each role player (by IID or key attribute), then
-/// inserts the relation linking them.
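-///
-/// Produces roughly the following for key-attribute players (an
-/// illustrative sketch; the exact formatting is decided by `QueryCompiler`):
-/// ```text
-/// match
-/// $_player_0 isa <player-type>, has <key-attr> <key-value>;
-/// insert
-/// $r (<role>: $_player_0, ...) isa <relation-type>;
-/// ```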
-pub fn build_relation_insert(
-    type_name: &str,
-    role_players: &[RolePlayerSpec],
-    attributes: &HashMap<String, AttributeValueSpec>,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_relation(schema, type_name)?;
-
-    for attr_name in attributes.keys() {
-        require_owned_attribute(schema, type_name, attr_name)?;
-    }
-
-    let mut match_patterns: Vec<Pattern> = Vec::new();
-    let mut ast_role_players: Vec<RolePlayer> = Vec::new();
-
-    for (i, rp) in role_players.iter().enumerate() {
-        let player_var = format!("$_player_{i}");
-
-        // Match the role player entity
-        if let Some(ref iid) = rp.iid {
-            match_patterns.push(Pattern::Entity {
-                variable: player_var.clone(),
-                type_name: rp.entity_type.clone(),
-                constraints: vec![Constraint::Iid(iid.clone())],
-                is_strict: false,
-            });
-        } else if let (Some(key_attr), Some(key_value)) =
-            (&rp.key_attr, &rp.key_value)
-        {
-            match_patterns.push(Pattern::Entity {
-                variable: player_var.clone(),
-                type_name: rp.entity_type.clone(),
-                constraints: vec![Constraint::Has {
-                    attr_name: key_attr.clone(),
-                    value: to_literal(key_value),
-                }],
-                is_strict: false,
-            });
-        } else {
-            return Err(PipelineError::Validation(format!(
-                "Role player '{}' must specify either 'iid' or both 'key_attr' and 'key_value'",
-                rp.role
-            )));
-        }
-
-        ast_role_players.push(RolePlayer {
-            role: rp.role.clone(),
-            player_var,
-        });
-    }
-
-    // Build insert statement for the relation
-    let mut relation_attrs: Vec<Statement> = Vec::new();
-    for (attr_name, spec) in attributes {
-        relation_attrs.push(Statement::Has {
-            subject_var: "$r".to_string(),
-            attr_name: attr_name.clone(),
-            value: to_literal(spec),
-        });
-    }
-
-    let insert_stmt = Statement::Relation {
-        variable: "$r".to_string(),
-        type_name: type_name.to_string(),
-        role_players: ast_role_players,
-        include_variable: true,
-        attributes: relation_attrs,
-    };
-
-    let mut clauses = Vec::new();
-    if !match_patterns.is_empty() {
-        clauses.push(Clause::Match(match_patterns));
-    }
-    clauses.push(Clause::Insert(vec![insert_stmt]));
-
-    Ok(clauses)
-}
-
-/// Build MATCH + FETCH for listing relations.
-pub fn build_relation_fetch(
-    type_name: &str,
-    filters: &[FilterSpec],
-    sort: &[SortSpec],
-    limit: Option<usize>,
-    offset: Option<usize>,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_relation(schema, type_name)?;
-
-    let mut patterns: Vec<Pattern> = vec![Pattern::Relation {
-        variable: "$r".to_string(),
-        type_name: type_name.to_string(),
-        role_players: vec![],
-        constraints: vec![],
-    }];
-
-    for (i, f) in filters.iter().enumerate() {
-        require_owned_attribute(schema, type_name, &f.attr)?;
-        let attr_var = format!("$_attr_{i}");
-        patterns.push(Pattern::Has {
-            thing_var: "$r".to_string(),
-            attr_type: f.attr.clone(),
-            attr_var: attr_var.clone(),
-        });
-        patterns.push(Pattern::ValueComparison {
-            var: attr_var,
-            operator: f.op.clone(),
-            value: to_literal(&f.value),
-        });
-    }
-
-    let mut clauses = vec![
-        Clause::Match(patterns),
-        Clause::Fetch(vec![FetchItem::Wildcard {
-            key: "$r".to_string(),
-            var: "$r".to_string(),
-        }]),
-    ];
-
-    if !sort.is_empty() {
-        let sort_fields: Vec<SortField> = sort
-            .iter()
-            .map(|s| SortField {
-                variable: format!("$_sort_{}", s.attr),
-                ascending: s.dir != "desc",
-            })
-            .collect();
-        clauses.push(Clause::Sort(sort_fields));
-    }
-
-    if let Some(n) = limit {
-        clauses.push(Clause::Limit(n));
-    }
-    if let Some(n) = offset {
-        clauses.push(Clause::Offset(n));
-    }
-
-    Ok(clauses)
-}
-
-/// Build MATCH + DELETE clauses for deleting a relation by IID.
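-///
-/// Produces roughly (an illustrative sketch, mirroring the entity variant
-/// but matching on `$r`):
-/// ```text
-/// match
-/// $r isa <type>, iid <iid>;
-/// delete
-/// $r;
-/// ```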
-pub fn build_relation_delete_by_iid(
-    type_name: &str,
-    iid: &str,
-    schema: &TypeSchema,
-) -> Result<Vec<Clause>, PipelineError> {
-    require_relation(schema, type_name)?;
-
-    let patterns = vec![Pattern::Relation {
-        variable: "$r".to_string(),
-        type_name: type_name.to_string(),
-        role_players: vec![],
-        constraints: vec![Constraint::Iid(iid.to_string())],
-    }];
-
-    Ok(vec![
-        Clause::Match(patterns),
-        Clause::Delete(vec![Statement::DeleteThing("$r".to_string())]),
-    ])
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use type_bridge_core_lib::compiler::QueryCompiler;
-    use type_bridge_core_lib::schema::TypeSchema;
-
-    fn test_schema() -> TypeSchema {
-        TypeSchema::from_typeql(
-            r#"
-            define
-            attribute name, value string;
-            attribute age, value long;
-            attribute start-date, value date;
-            entity person,
-                owns name @key,
-                owns age;
-            entity company,
-                owns name @key;
-            relation employment,
-                relates employee,
-                relates employer,
-                owns start-date;
-            "#,
-        )
-        .unwrap()
-    }
-
-    fn compile(clauses: &[Clause]) -> String {
-        QueryCompiler::new().compile(clauses)
-    }
-
-    // =============================================
-    // Entity insert tests
-    // =============================================
-
-    #[test]
-    fn entity_insert_basic() {
-        let schema = test_schema();
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "name".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!("Alice"),
-                value_type: "string".to_string(),
-            },
-        );
-        attrs.insert(
-            "age".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!(30),
-                value_type: "long".to_string(),
-            },
-        );
-        let clauses = build_entity_insert("person", &attrs, &schema).unwrap();
-        assert_eq!(clauses.len(), 1);
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("insert"));
-        assert!(typeql.contains("person"));
-    }
-
-    #[test]
-    fn entity_insert_unknown_type() {
-        let schema = test_schema();
-        let attrs = HashMap::new();
-        let result = build_entity_insert("nonexistent", &attrs, &schema);
-        assert!(result.is_err());
-        assert!(result.unwrap_err().to_string().contains("Unknown entity type"));
-    }
-
-    #[test]
-    fn entity_insert_unknown_attribute() {
-        let schema = test_schema();
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "email".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!("a@b.com"),
-                value_type: "string".to_string(),
-            },
-        );
-        let result = build_entity_insert("person", &attrs, &schema);
-        assert!(result.is_err());
-        assert!(result.unwrap_err().to_string().contains("does not own attribute"));
-    }
-
-    // =============================================
-    // Entity fetch tests
-    // =============================================
-
-    #[test]
-    fn entity_fetch_basic() {
-        let schema = test_schema();
-        let clauses =
-            build_entity_fetch("person", &[], &[], None, None, &schema).unwrap();
-        assert!(clauses.len() >= 2);
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("match"));
-        assert!(typeql.contains("person"));
-        assert!(typeql.contains("fetch"));
-    }
-
-    #[test]
-    fn entity_fetch_with_limit_offset() {
-        let schema = test_schema();
-        let clauses =
-            build_entity_fetch("person", &[], &[], Some(10), Some(5), &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("limit 10"));
-        assert!(typeql.contains("offset 5"));
-    }
-
-    #[test]
-    fn entity_fetch_with_filter() {
-        let schema = test_schema();
-        let filters = vec![FilterSpec {
-            attr: "age".to_string(),
-            op: ">=".to_string(),
-            value: AttributeValueSpec {
-                value: serde_json::json!(18),
-                value_type: "long".to_string(),
-            },
-        }];
-        let clauses =
-            build_entity_fetch("person", &filters, &[], None, None, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains(">="));
-    }
-
-    #[test]
-    fn entity_fetch_unknown_filter_attr() {
-        let schema = test_schema();
-        let filters = vec![FilterSpec {
-            attr: "email".to_string(),
-            op: "==".to_string(),
-            value: AttributeValueSpec {
-                value: serde_json::json!("a@b.com"),
-                value_type: "string".to_string(),
-            },
-        }];
-        let result = build_entity_fetch("person", &filters, &[], None, None, &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Entity fetch by IID
-    // =============================================
-
-    #[test]
-    fn entity_fetch_by_iid() {
-        let schema = test_schema();
-        let clauses = build_entity_fetch_by_iid("person", "0xabc123", &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("0xabc123"));
-        assert!(typeql.contains("person"));
-    }
-
-    #[test]
-    fn entity_fetch_by_iid_unknown_type() {
-        let schema = test_schema();
-        let result = build_entity_fetch_by_iid("nonexistent", "0x1", &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Entity delete by IID
-    // =============================================
-
-    #[test]
-    fn entity_delete_by_iid() {
-        let schema = test_schema();
-        let clauses = build_entity_delete_by_iid("person", "0xabc123", &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("delete"));
-        assert!(typeql.contains("0xabc123"));
-    }
-
-    #[test]
-    fn entity_delete_unknown_type() {
-        let schema = test_schema();
-        let result = build_entity_delete_by_iid("nonexistent", "0x1", &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Entity update by IID
-    // =============================================
-
-    #[test]
-    fn entity_update_by_iid() {
-        let schema = test_schema();
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "age".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!(31),
-                value_type: "long".to_string(),
-            },
-        );
-        let clauses =
-            build_entity_update_by_iid("person", "0xabc", &attrs, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("match"));
-        assert!(typeql.contains("delete"));
-        assert!(typeql.contains("insert"));
-        assert!(typeql.contains("0xabc"));
-    }
-
-    #[test]
-    fn entity_update_unknown_attr() {
-        let schema = test_schema();
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "email".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!("x@y.com"),
-                value_type: "string".to_string(),
-            },
-        );
-        let result = build_entity_update_by_iid("person", "0x1", &attrs, &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Relation insert tests
-    // =============================================
-
-    #[test]
-    fn relation_insert_with_key_attr() {
-        let schema = test_schema();
-        let role_players = vec![
-            RolePlayerSpec {
-                role: "employee".to_string(),
-                entity_type: "person".to_string(),
-                iid: None,
-                key_attr: Some("name".to_string()),
-                key_value: Some(AttributeValueSpec {
-                    value: serde_json::json!("Alice"),
-                    value_type: "string".to_string(),
-                }),
-            },
-            RolePlayerSpec {
-                role: "employer".to_string(),
-                entity_type: "company".to_string(),
-                iid: None,
-                key_attr: Some("name".to_string()),
-                key_value: Some(AttributeValueSpec {
-                    value: serde_json::json!("Acme"),
-                    value_type: "string".to_string(),
-                }),
-            },
-        ];
-        let attrs = HashMap::new();
-        let clauses =
-            build_relation_insert("employment", &role_players, &attrs, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("match"));
-        assert!(typeql.contains("insert"));
-        assert!(typeql.contains("employment"));
-    }
-
-    #[test]
-    fn relation_insert_with_iid() {
-        let schema = test_schema();
-        let role_players = vec![RolePlayerSpec {
-            role: "employee".to_string(),
-            entity_type: "person".to_string(),
-            iid: Some("0xabc".to_string()),
-            key_attr: None,
-            key_value: None,
-        }];
-        let clauses =
-            build_relation_insert("employment", &role_players, &HashMap::new(), &schema)
-                .unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("0xabc"));
-    }
-
-    #[test]
-    fn relation_insert_missing_player_id() {
-        let schema = test_schema();
-        let role_players = vec![RolePlayerSpec {
-            role: "employee".to_string(),
-            entity_type: "person".to_string(),
-            iid: None,
-            key_attr: None,
-            key_value: None,
-        }];
-        let result =
-            build_relation_insert("employment", &role_players, &HashMap::new(), &schema);
-        assert!(result.is_err());
-        assert!(result
-            .unwrap_err()
-            .to_string()
-            .contains("must specify either 'iid'"));
-    }
-
-    #[test]
-    fn relation_insert_unknown_type() {
-        let schema = test_schema();
-        let result = build_relation_insert("nonexistent", &[], &HashMap::new(), &schema);
-        assert!(result.is_err());
-    }
-
-    #[test]
-    fn relation_insert_unknown_attr() {
-        let schema = test_schema();
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "salary".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!(50000),
-                value_type: "long".to_string(),
-            },
-        );
-        let result = build_relation_insert("employment", &[], &attrs, &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Relation fetch tests
-    // =============================================
-
-    #[test]
-    fn relation_fetch_basic() {
-        let schema = test_schema();
-        let clauses =
-            build_relation_fetch("employment", &[], &[], None, None, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("match"));
-        assert!(typeql.contains("employment"));
-        assert!(typeql.contains("fetch"));
-    }
-
-    #[test]
-    fn relation_fetch_unknown_type() {
-        let schema = test_schema();
-        let result = build_relation_fetch("nonexistent", &[], &[], None, None, &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Relation delete tests
-    // =============================================
-
-    #[test]
-    fn relation_delete_by_iid() {
-        let schema = test_schema();
-        let clauses =
-            build_relation_delete_by_iid("employment", "0xdef", &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("delete"));
-        assert!(typeql.contains("0xdef"));
-    }
-
-    #[test]
-    fn relation_delete_unknown_type() {
-        let schema = test_schema();
-        let result = build_relation_delete_by_iid("nonexistent", "0x1", &schema);
-        assert!(result.is_err());
-    }
-
-    // =============================================
-    // Edge cases
-    // =============================================
-
-    #[test]
-    fn entity_insert_empty_attributes() {
-        let schema = test_schema();
-        let clauses = build_entity_insert("person", &HashMap::new(), &schema).unwrap();
-        assert_eq!(clauses.len(), 1);
-    }
-
-    #[test]
-    fn entity_fetch_with_sort() {
-        let schema = test_schema();
-        let sort = vec![SortSpec {
-            attr: "name".to_string(),
-            dir: "desc".to_string(),
-        }];
-        let clauses =
-            build_entity_fetch("person", &[], &sort, None, None, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("sort"));
-    }
-
-    #[test]
-    fn relation_insert_with_attrs() {
-        let schema = test_schema();
-        let role_players = vec![RolePlayerSpec {
-            role: "employee".to_string(),
-            entity_type: "person".to_string(),
-            iid: Some("0x1".to_string()),
-            key_attr: None,
-            key_value: None,
-        }];
-        let mut attrs = HashMap::new();
-        attrs.insert(
-            "start-date".to_string(),
-            AttributeValueSpec {
-                value: serde_json::json!("2024-01-01"),
-                value_type: "date".to_string(),
-            },
-        );
-        let clauses =
-            build_relation_insert("employment", &role_players, &attrs, &schema).unwrap();
-        let typeql = compile(&clauses);
-        assert!(typeql.contains("start-date"));
-    }
-}
diff --git a/type-bridge-core/crates/server/src/crud/handlers.rs b/type-bridge-core/crates/server/src/crud/handlers.rs
deleted file mode 100644
index f36d3cc..0000000
--- a/type-bridge-core/crates/server/src/crud/handlers.rs
+++ /dev/null
@@ -1,338 +0,0 @@
-//! Axum CRUD handlers for entity and relation endpoints.
-//!
-//! Each handler extracts the path/body, builds AST clauses via the builder,
-//! passes them through the [`QueryPipeline`], and returns a [`CrudResponse`].
-
-use std::collections::HashMap;
-use std::sync::Arc;
-
-use axum::extract::{Path, Query, State};
-use axum::Json;
-
-use crate::error::PipelineError;
-use crate::pipeline::{QueryInput, QueryPipeline};
-
-use super::builder;
-use super::types::*;
-
-/// POST /entities/{type_name}
-///
-/// Insert a new entity with the given attributes.
-pub async fn handle_entity_insert(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path(type_name): Path<String>,
-    Json(req): Json<EntityInsertRequest>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_entity_insert(&type_name, &req.attributes, schema)?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: req.database,
-            transaction_type: "write".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// GET /entities/{type_name}
-///
-/// Fetch entities with optional filters, sort, limit, and offset.
-pub async fn handle_entity_fetch(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path(type_name): Path<String>,
-    Query(req): Query<EntityFetchQuery>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_entity_fetch(
-        &type_name,
-        &[],
-        &[],
-        req.limit,
-        req.offset,
-        schema,
-    )?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: req.database,
-            transaction_type: "read".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// Query parameters for GET /entities/{type_name}.
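-///
-/// Illustrative request: `GET /entities/person?database=my_db&limit=10&offset=5`.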
-#[derive(Debug, serde::Deserialize)]
-pub struct EntityFetchQuery {
-    pub database: Option<String>,
-    pub limit: Option<usize>,
-    pub offset: Option<usize>,
-}
-
-/// GET /entities/{type_name}/{iid}
-///
-/// Fetch a single entity by its internal identifier.
-pub async fn handle_entity_get_by_iid(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path((type_name, iid)): Path<(String, String)>,
-    Query(params): Query<DatabaseParam>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_entity_fetch_by_iid(&type_name, &iid, schema)?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: params.database,
-            transaction_type: "read".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// PUT /entities/{type_name}/{iid}
-///
-/// Update an entity's attributes by IID.
-pub async fn handle_entity_update(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path((type_name, iid)): Path<(String, String)>,
-    Json(req): Json<EntityUpdateRequest>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses =
-        builder::build_entity_update_by_iid(&type_name, &iid, &req.attributes, schema)?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: req.database,
-            transaction_type: "write".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// DELETE /entities/{type_name}/{iid}
-///
-/// Delete an entity by its internal identifier.
-pub async fn handle_entity_delete(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path((type_name, iid)): Path<(String, String)>,
-    Query(params): Query<DatabaseParam>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_entity_delete_by_iid(&type_name, &iid, schema)?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: params.database,
-            transaction_type: "write".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// POST /relations/{type_name}
-///
-/// Insert a new relation with role players and optional attributes.
-pub async fn handle_relation_insert(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path(type_name): Path<String>,
-    Json(req): Json<RelationInsertRequest>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_relation_insert(
-        &type_name,
-        &req.role_players,
-        &req.attributes,
-        schema,
-    )?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: req.database,
-            transaction_type: "write".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// GET /relations/{type_name}
-///
-/// Fetch relations with optional limit and offset.
-pub async fn handle_relation_fetch(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path(type_name): Path<String>,
-    Query(req): Query<EntityFetchQuery>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_relation_fetch(
-        &type_name,
-        &[],
-        &[],
-        req.limit,
-        req.offset,
-        schema,
-    )?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: req.database,
-            transaction_type: "read".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// DELETE /relations/{type_name}/{iid}
-///
-/// Delete a relation by its internal identifier.
-pub async fn handle_relation_delete(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Path((type_name, iid)): Path<(String, String)>,
-    Query(params): Query<DatabaseParam>,
-) -> Result<Json<CrudResponse>, PipelineError> {
-    let schema = pipeline
-        .schema()
-        .ok_or_else(|| PipelineError::Schema("No schema loaded".to_string()))?;
-
-    let clauses = builder::build_relation_delete_by_iid(&type_name, &iid, schema)?;
-
-    let typeql = type_bridge_core_lib::compiler::QueryCompiler::new().compile(&clauses);
-
-    let output = pipeline
-        .execute_query(QueryInput {
-            database: params.database,
-            transaction_type: "write".to_string(),
-            clauses,
-            metadata: HashMap::new(),
-        })
-        .await?;
-
-    Ok(Json(CrudResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: CrudMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            typeql,
-        },
-    }))
-}
-
-/// Optional database query parameter.
-#[derive(Debug, serde::Deserialize)]
-pub struct DatabaseParam {
-    pub database: Option<String>,
-}
diff --git a/type-bridge-core/crates/server/src/crud/mod.rs b/type-bridge-core/crates/server/src/crud/mod.rs
deleted file mode 100644
index f379e4c..0000000
--- a/type-bridge-core/crates/server/src/crud/mod.rs
+++ /dev/null
@@ -1,24 +0,0 @@
-//! CRUD endpoints for entity and relation types.
-//!
-//! Provides REST-style HTTP endpoints that build TypeQL queries dynamically
-//! from request parameters, using [`TypeSchema`](type_bridge_core_lib::schema::TypeSchema)
-//! for validation. All operations flow through the [`QueryPipeline`](crate::pipeline::QueryPipeline),
-//! getting validation, interceptor processing, and audit logging for free.
-//!
-//! # Routes
-//!
-//! | Method | Path | Handler |
-//! |--------|------|---------|
-//! | POST | `/entities/{type_name}` | Insert a new entity |
-//! | GET | `/entities/{type_name}` | List entities with optional filters |
-//! | GET | `/entities/{type_name}/{iid}` | Fetch entity by IID |
-//! | PUT | `/entities/{type_name}/{iid}` | Update entity attributes |
-//! | DELETE | `/entities/{type_name}/{iid}` | Delete entity by IID |
-//! | POST | `/relations/{type_name}` | Insert a new relation |
-//! | GET | `/relations/{type_name}` | List relations |
-//! | DELETE | `/relations/{type_name}/{iid}` | Delete relation by IID |
-
-pub mod builder;
-#[cfg(feature = "axum-transport")]
-pub mod handlers;
-pub mod types;
diff --git a/type-bridge-core/crates/server/src/crud/types.rs b/type-bridge-core/crates/server/src/crud/types.rs
deleted file mode 100644
index 196a4b6..0000000
--- a/type-bridge-core/crates/server/src/crud/types.rs
+++ /dev/null
@@ -1,258 +0,0 @@
-//! Request and response types for CRUD endpoints.
-
-use std::collections::HashMap;
-
-use serde::{Deserialize, Serialize};
-
-/// Request body for inserting a new entity.
-///
-/// # Example
-///
-/// ```json
-/// {
-///     "database": "my_db",
-///     "attributes": {
-///         "name": { "value": "Alice", "value_type": "string" },
-///         "age": { "value": 30, "value_type": "long" }
-///     }
-/// }
-/// ```
-#[derive(Debug, Deserialize)]
-pub struct EntityInsertRequest {
-    /// Optional database override (uses pipeline default if not specified).
-    pub database: Option<String>,
-    /// Attribute name-to-value map.
-    pub attributes: HashMap<String, AttributeValueSpec>,
-}
-
-/// Request body for fetching entities with optional filters.
-///
-/// Used as query parameters on GET requests.
-#[derive(Debug, Deserialize)]
-pub struct EntityFetchRequest {
-    /// Optional database override.
-    pub database: Option<String>,
-    /// Optional filter specifications.
-    #[serde(default)]
-    pub filters: Vec<FilterSpec>,
-    /// Optional sort specifications.
-    #[serde(default)]
-    pub sort: Vec<SortSpec>,
-    /// Maximum number of results.
-    pub limit: Option<usize>,
-    /// Number of results to skip.
-    pub offset: Option<usize>,
-}
-
-/// Request body for updating an entity's attributes.
-#[derive(Debug, Deserialize)]
-pub struct EntityUpdateRequest {
-    /// Optional database override.
-    pub database: Option<String>,
-    /// New attribute values to set.
-    pub attributes: HashMap<String, AttributeValueSpec>,
-}
-
-/// Request body for inserting a new relation.
-///
-/// # Example
-///
-/// ```json
-/// {
-///     "database": "my_db",
-///     "role_players": [
-///         { "role": "employee", "entity_type": "person", "key_attr": "name", "key_value": { "value": "Alice", "value_type": "string" } },
-///         { "role": "employer", "entity_type": "company", "key_attr": "name", "key_value": { "value": "Acme", "value_type": "string" } }
-///     ],
-///     "attributes": {
-///         "start-date": { "value": "2024-01-01", "value_type": "date" }
-///     }
-/// }
-/// ```
-#[derive(Debug, Deserialize)]
-pub struct RelationInsertRequest {
-    /// Optional database override.
-    pub database: Option<String>,
-    /// Role player specifications for the relation.
-    pub role_players: Vec<RolePlayerSpec>,
-    /// Optional attributes on the relation itself.
-    #[serde(default)]
-    pub attributes: HashMap<String, AttributeValueSpec>,
-}
-
-/// A typed attribute value.
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct AttributeValueSpec {
-    /// The raw JSON value (string, number, boolean, etc.).
-    pub value: serde_json::Value,
-    /// The TypeDB value type (e.g. "string", "long", "double", "boolean", "datetime").
-    pub value_type: String,
-}
-
-/// A filter specification for query endpoints.
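-///
-/// # Example
-///
-/// ```json
-/// { "attr": "age", "op": ">=", "value": { "value": 18, "value_type": "long" } }
-/// ```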
-#[derive(Debug, Deserialize)]
-pub struct FilterSpec {
-    /// The attribute name to filter on.
-    pub attr: String,
-    /// The comparison operator (e.g. "==", "!=", ">", "<", ">=", "<=", "contains", "like").
-    pub op: String,
-    /// The value to compare against.
-    pub value: AttributeValueSpec,
-}
-
-/// A sort specification for query endpoints.
-#[derive(Debug, Deserialize)]
-pub struct SortSpec {
-    /// The attribute name to sort by.
-    pub attr: String,
-    /// Sort direction: "asc" or "desc".
-    #[serde(default = "default_sort_dir")]
-    pub dir: String,
-}
-
-fn default_sort_dir() -> String {
-    "asc".to_string()
-}
-
-/// Specification for a role player in a relation insert.
-#[derive(Debug, Deserialize)]
-pub struct RolePlayerSpec {
-    /// The role name (e.g. "employee", "employer").
-    pub role: String,
-    /// The entity type of the role player.
-    pub entity_type: String,
-    /// Optional IID to identify the role player directly.
-    pub iid: Option<String>,
-    /// Optional key attribute name to find the role player by.
-    pub key_attr: Option<String>,
-    /// Optional key attribute value to find the role player by.
-    pub key_value: Option<AttributeValueSpec>,
-}
-
-/// Unified CRUD response.
-#[derive(Debug, Serialize)]
-pub struct CrudResponse {
-    /// Status indicator ("ok" on success).
-    pub status: String,
-    /// The results of the operation.
-    pub results: serde_json::Value,
-    /// Response metadata (request ID, timing, etc.).
-    pub metadata: CrudMetadata,
-}
-
-/// Metadata for CRUD responses.
-#[derive(Debug, Serialize)]
-pub struct CrudMetadata {
-    /// Unique request identifier for tracking.
-    pub request_id: String,
-    /// Query execution time in milliseconds.
-    pub execution_time_ms: u64,
-    /// The TypeQL query that was executed.
-    pub typeql: String,
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn deserialize_entity_insert_request() {
-        let json = serde_json::json!({
-            "attributes": {
-                "name": { "value": "Alice", "value_type": "string" },
-                "age": { "value": 30, "value_type": "long" }
-            }
-        });
-        let req: EntityInsertRequest = serde_json::from_value(json).unwrap();
-        assert!(req.database.is_none());
-        assert_eq!(req.attributes.len(), 2);
-        assert_eq!(req.attributes["name"].value_type, "string");
-    }
-
-    #[test]
-    fn deserialize_entity_fetch_request_defaults() {
-        let json = serde_json::json!({});
-        let req: EntityFetchRequest = serde_json::from_value(json).unwrap();
-        assert!(req.filters.is_empty());
-        assert!(req.sort.is_empty());
-        assert!(req.limit.is_none());
-    }
-
-    #[test]
-    fn deserialize_relation_insert_request() {
-        let json = serde_json::json!({
-            "role_players": [
-                {
-                    "role": "employee",
-                    "entity_type": "person",
-                    "key_attr": "name",
-                    "key_value": { "value": "Alice", "value_type": "string" }
-                }
-            ],
-            "attributes": {}
-        });
-        let req: RelationInsertRequest = serde_json::from_value(json).unwrap();
-        assert_eq!(req.role_players.len(), 1);
-        assert_eq!(req.role_players[0].role, "employee");
-    }
-
-    #[test]
-    fn deserialize_filter_spec() {
-        let json = serde_json::json!({
-            "attr": "age",
-            "op": ">=",
-            "value": { "value": 18, "value_type": "long" }
-        });
-        let filter: FilterSpec = serde_json::from_value(json).unwrap();
-        assert_eq!(filter.attr, "age");
-        assert_eq!(filter.op, ">=");
-    }
-
-    #[test]
-    fn sort_spec_default_direction() {
-        let json = serde_json::json!({ "attr": "name" });
-        let sort: SortSpec = serde_json::from_value(json).unwrap();
-        assert_eq!(sort.dir, "asc");
-    }
-
-    #[test]
-    fn serialize_crud_response() {
-        let resp = CrudResponse {
-            status: "ok".to_string(),
-            results: serde_json::json!({"inserted": true}),
serde_json::json!({"inserted": true}), - metadata: CrudMetadata { - request_id: "abc-123".to_string(), - execution_time_ms: 42, - typeql: "insert $e isa person;".to_string(), - }, - }; - let json = serde_json::to_value(&resp).unwrap(); - assert_eq!(json["status"], "ok"); - assert_eq!(json["metadata"]["execution_time_ms"], 42); - assert_eq!(json["metadata"]["typeql"], "insert $e isa person;"); - } - - #[test] - fn role_player_spec_with_iid() { - let json = serde_json::json!({ - "role": "friend", - "entity_type": "person", - "iid": "0xabc123" - }); - let rp: RolePlayerSpec = serde_json::from_value(json).unwrap(); - assert_eq!(rp.iid.as_deref(), Some("0xabc123")); - assert!(rp.key_attr.is_none()); - } - - #[test] - fn attribute_value_spec_roundtrip() { - let spec = AttributeValueSpec { - value: serde_json::json!("hello"), - value_type: "string".to_string(), - }; - let json = serde_json::to_value(&spec).unwrap(); - let spec2: AttributeValueSpec = serde_json::from_value(json).unwrap(); - assert_eq!(spec2.value, serde_json::json!("hello")); - assert_eq!(spec2.value_type, "string"); - } -} diff --git a/type-bridge-core/crates/server/src/interceptor/traits.rs b/type-bridge-core/crates/server/src/interceptor/traits.rs index 612304e..e9763a3 100644 --- a/type-bridge-core/crates/server/src/interceptor/traits.rs +++ b/type-bridge-core/crates/server/src/interceptor/traits.rs @@ -135,7 +135,6 @@ mod tests { let interceptor = MinimalInterceptor; - // Exercise name() and on_request() to cover all methods assert_eq!(interceptor.name(), "minimal"); let mut ctx = RequestContext { request_id: "req-1".into(), @@ -168,7 +167,6 @@ mod tests { } let boxed: Box = Box::new(DummyInterceptor); - // Exercise methods through trait object assert_eq!(boxed.name(), "dummy"); let mut ctx = RequestContext { request_id: "req-1".into(), diff --git a/type-bridge-core/crates/server/src/lib.rs b/type-bridge-core/crates/server/src/lib.rs index 70c89a3..e7d38c1 100644 --- a/type-bridge-core/crates/server/src/lib.rs +++ b/type-bridge-core/crates/server/src/lib.rs @@ -13,7 +13,7 @@ //! | Feature | Default | Description | //! |---------|---------|-------------| //! | `typedb` | yes | TypeDB backend via [`TypeDBClient`](typedb::TypeDBClient) | -//! | `axum-transport` | yes | HTTP server with `/query`, `/query/raw`, `/health`, `/schema` endpoints | +//! | `axum-transport` | yes | HTTP server with `/query`, `/query/validate`, `/health`, `/schema` endpoints | //! //! Disable defaults with `--no-default-features` to use the core pipeline as //! a library without any transport or backend. @@ -43,7 +43,6 @@ //! TypeQL schemas from custom sources. 
 pub mod config;
-pub mod crud;
 pub mod error;
 pub mod executor;
 pub mod interceptor;
@@ -56,6 +55,6 @@ pub mod transport;
 #[cfg(feature = "typedb")]
 pub mod typedb;
 
-#[cfg(test)]
+#[cfg(any(test, feature = "test-helpers"))]
 #[cfg_attr(coverage_nightly, coverage(off))]
-pub(crate) mod test_helpers;
+pub mod test_helpers;
diff --git a/type-bridge-core/crates/server/src/pipeline.rs b/type-bridge-core/crates/server/src/pipeline.rs
index fd23506..7aa6727 100644
--- a/type-bridge-core/crates/server/src/pipeline.rs
+++ b/type-bridge-core/crates/server/src/pipeline.rs
@@ -3,7 +3,6 @@
 use std::time::Instant;
 
 use type_bridge_core_lib::ast::Clause;
 use type_bridge_core_lib::compiler::QueryCompiler;
-use type_bridge_core_lib::query_parser;
 use type_bridge_core_lib::schema::TypeSchema;
 use type_bridge_core_lib::validation::ValidationEngine;
@@ -20,14 +19,6 @@ pub struct QueryInput {
     pub metadata: HashMap<String, serde_json::Value>,
 }
 
-/// Input for a raw TypeQL query.
-pub struct RawQueryInput {
-    pub database: Option<String>,
-    pub transaction_type: String,
-    pub query: String,
-    pub metadata: HashMap<String, serde_json::Value>,
-}
-
 /// Input for a validation-only request.
 pub struct ValidateInput {
     pub clauses: Vec<Clause>,
 }
@@ -86,6 +77,7 @@ pub struct QueryPipeline {
     interceptor_chain: InterceptorChain,
     default_database: String,
     executor: Box<dyn QueryExecutor>,
+    skip_validation: bool,
 }
 
 impl QueryPipeline {
@@ -107,7 +99,9 @@ impl QueryPipeline {
         };
 
         // Validate against schema
-        if let Some(schema) = &self.schema {
+        if !self.skip_validation
+            && let Some(schema) = &self.schema
+        {
             let result = self.validation_engine.validate_query(&input.clauses, schema);
             if !result.is_valid {
                 return Err(PipelineError::Validation(format!(
@@ -161,20 +155,6 @@ impl QueryPipeline {
         })
     }
 
-    /// Execute a raw TypeQL query through the full pipeline (parse → validate → intercept → compile → execute).
-    pub async fn execute_raw(&self, input: RawQueryInput) -> Result<QueryOutput, PipelineError> {
-        let clauses = query_parser::parse_typeql_query(&input.query)
-            .map_err(|e| PipelineError::Parse(e.to_string()))?;
-
-        self.execute_query(QueryInput {
-            database: input.database,
-            transaction_type: input.transaction_type,
-            clauses,
-            metadata: input.metadata,
-        })
-        .await
-    }
-
     /// Validate clauses against the loaded schema without executing.
     pub fn validate(&self, input: &ValidateInput) -> Result<ValidationResult, PipelineError> {
         let schema = self
@@ -234,6 +214,7 @@ pub struct PipelineBuilder {
     schema_source: Option<Box<dyn SchemaSource>>,
     interceptors: Vec<Box<dyn Interceptor>>,
     default_database: String,
+    skip_validation: bool,
 }
 
 impl PipelineBuilder {
@@ -244,6 +225,7 @@
             schema_source: None,
             interceptors: Vec::new(),
             default_database: String::new(),
+            skip_validation: false,
         }
     }
 
@@ -265,6 +247,15 @@
         self
     }
 
+    /// Skip schema validation during query execution.
+    ///
+    /// The schema is still loaded (and accessible via [`QueryPipeline::schema`]),
+    /// but queries are not validated against it before execution.
+    pub fn with_skip_validation(mut self) -> Self {
+        self.skip_validation = true;
+        self
+    }
+
     /// Build the pipeline, loading the schema if a source was provided.
     pub fn build(self) -> Result<QueryPipeline, PipelineError> {
         let schema = match self.schema_source {
@@ -278,6 +269,7 @@
             interceptor_chain: InterceptorChain::new(self.interceptors),
             default_database: self.default_database,
             executor: self.executor,
+            skip_validation: self.skip_validation,
         })
     }
 }
@@ -467,7 +459,6 @@ mod tests {
             .unwrap();
 
         assert!(pipeline.schema().is_none());
-        // Execute a query to exercise PassthroughInterceptor's name() and on_request()
         let input = make_query_input(vec![]);
         let output = pipeline.execute_query(input).await.unwrap();
         assert_eq!(output.interceptors_applied, vec!["first", "second"]);
@@ -506,7 +497,6 @@
     #[tokio::test]
     async fn execute_query_skips_validation_when_no_schema() {
         let pipeline = make_pipeline(MockExecutor::new(), false);
-        // Even invalid type names pass when there's no schema
         let clauses = vec![Clause::Match(vec![Pattern::Entity {
             variable: "x".to_string(),
             type_name: "nonexistent_type".to_string(),
@@ -529,7 +519,6 @@
     #[tokio::test]
     async fn execute_query_validates_when_schema_present_invalid() {
         let pipeline = make_pipeline(MockExecutor::new(), true);
-        // Reference an attribute type not in schema
         let clauses = vec![Clause::Match(vec![Pattern::Entity {
             variable: "p".to_string(),
             type_name: "person".to_string(),
@@ -602,7 +591,6 @@
         assert!(!output.request_id.is_empty());
         assert_eq!(output.results, serde_json::json!({"ok": true}));
         assert_eq!(output.interceptors_applied, vec!["counter"]);
-        // execution_time_ms is non-negative (it's u64, always >= 0)
         assert_eq!(count.load(Ordering::SeqCst), 1);
     }
 
@@ -625,7 +613,6 @@
         pipeline.execute_query(input).await.unwrap();
 
         let recorded = calls.lock().unwrap();
-        // The compiled TypeQL should be a non-empty string
         assert!(!recorded[0].1.is_empty());
     }
 
@@ -647,75 +634,6 @@
         assert_eq!(recorded[0].2, "write");
     }
 
-    // =============================================
-    // execute_raw tests
-    // =============================================
-
-    #[tokio::test]
-    async fn execute_raw_valid_typeql() {
-        let executor = MockExecutor::new();
-        let calls = executor.calls.clone();
-        let pipeline = make_pipeline(executor, false);
-
-        let input = RawQueryInput {
-            database: None,
-            transaction_type: "read".to_string(),
-            query: "match $p isa person;".to_string(),
-            metadata: HashMap::new(),
-        };
-        let result = pipeline.execute_raw(input).await;
-        assert!(result.is_ok());
-
-        let recorded = calls.lock().unwrap();
-        assert_eq!(recorded.len(), 1);
-    }
-
-    #[tokio::test]
-    async fn execute_raw_invalid_typeql() {
-        let pipeline = make_pipeline(MockExecutor::new(), false);
-        let input = RawQueryInput {
-            database: None,
-            transaction_type: "read".to_string(),
-            query: "this is totally invalid <<>>".to_string(),
-            metadata: HashMap::new(),
-        };
-        let result = pipeline.execute_raw(input).await;
-        let err = result.unwrap_err();
-        assert!(matches!(&err, PipelineError::Parse(_)));
-    }
-
-    #[tokio::test]
-    async fn execute_raw_database_passthrough() {
-        let executor = MockExecutor::new();
-        let calls = executor.calls.clone();
-        let pipeline = make_pipeline(executor, false);
-
-        let input = RawQueryInput {
-            database: Some("raw_db".to_string()),
-            transaction_type: "read".to_string(),
-            query: "match $p isa person;".to_string(),
-            metadata: HashMap::new(),
-        };
-        pipeline.execute_raw(input).await.unwrap();
-
-        let recorded = calls.lock().unwrap();
-        assert_eq!(recorded[0].0, "raw_db");
-    }
-
-    #[tokio::test]
-    async fn execute_raw_executor_failure() {
-        let pipeline = make_pipeline(MockExecutor::failing("raw fail"), false);
-        let input = RawQueryInput {
-            database: None,
-            transaction_type: "read".to_string(),
-            query: "match $p isa person;".to_string(),
-            metadata: HashMap::new(),
-        };
-        let result = pipeline.execute_raw(input).await;
-        let err = result.unwrap_err();
-        assert!(matches!(&err, PipelineError::QueryExecution(msg) if msg.contains("raw fail")));
-    }
-
     // =============================================
     // validate tests
     // =============================================
diff --git a/type-bridge-core/crates/server/src/test_helpers.rs b/type-bridge-core/crates/server/src/test_helpers.rs
index 3db8cdd..a1c8601 100644
--- a/type-bridge-core/crates/server/src/test_helpers.rs
+++ b/type-bridge-core/crates/server/src/test_helpers.rs
@@ -60,6 +60,12 @@ pub struct MockExecutor {
     pub connected: Arc<Mutex<bool>>,
 }
 
+impl Default for MockExecutor {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl MockExecutor {
     pub fn new() -> Self {
         Self {
@@ -121,3 +127,4 @@ pub fn make_pipeline(executor: MockExecutor, with_schema: bool) -> QueryPipeline
     builder.build().expect("Failed to build test pipeline")
 }
+
diff --git a/type-bridge-core/crates/server/src/transport/http.rs b/type-bridge-core/crates/server/src/transport/http.rs
index 7e6d746..a9ba680 100644
--- a/type-bridge-core/crates/server/src/transport/http.rs
+++ b/type-bridge-core/crates/server/src/transport/http.rs
@@ -7,9 +7,8 @@
 use axum::routing::{get, post};
 use axum::{Json, Router};
 use serde::Serialize;
 
-use crate::crud::handlers;
 use crate::error::PipelineError;
-use crate::pipeline::{QueryInput, QueryPipeline, RawQueryInput, ValidateInput};
+use crate::pipeline::{QueryInput, QueryPipeline, ValidateInput};
 use crate::transport::types::*;
 
 // --- Axum-specific error response types ---
@@ -59,30 +58,9 @@ impl IntoResponse for PipelineError {
 pub fn create_router(pipeline: Arc<QueryPipeline>) -> Router {
     Router::new()
         .route("/query", post(handle_query))
-        .route("/query/raw", post(handle_raw_query))
         .route("/query/validate", post(handle_validate))
         .route("/health", get(handle_health))
         .route("/schema", get(handle_schema))
-        // CRUD entity endpoints
-        .route(
-            "/entities/{type_name}",
-            post(handlers::handle_entity_insert).get(handlers::handle_entity_fetch),
-        )
-        .route(
-            "/entities/{type_name}/{iid}",
-            get(handlers::handle_entity_get_by_iid)
-                .put(handlers::handle_entity_update)
-                .delete(handlers::handle_entity_delete),
-        )
-        // CRUD relation endpoints
-        .route(
-            "/relations/{type_name}",
-            post(handlers::handle_relation_insert).get(handlers::handle_relation_fetch),
-        )
-        .route(
-            "/relations/{type_name}/{iid}",
-            axum::routing::delete(handlers::handle_relation_delete),
-        )
         .with_state(pipeline)
 }
 
@@ -112,30 +90,6 @@
         }))
 }
 
-async fn handle_raw_query(
-    State(pipeline): State<Arc<QueryPipeline>>,
-    Json(req): Json<RawQueryRequest>,
-) -> Result<Json<QueryResponse>, PipelineError> {
-    let output = pipeline
-        .execute_raw(RawQueryInput {
-            database: req.database,
-            transaction_type: req.transaction_type,
-            query: req.query,
-            metadata: req.metadata,
-        })
-        .await?;
-
-    Ok(Json(QueryResponse {
-        status: "ok".to_string(),
-        results: output.results,
-        metadata: ResponseMetadata {
-            request_id: output.request_id,
-            execution_time_ms: output.execution_time_ms,
-            interceptors_applied: output.interceptors_applied,
-        },
-    }))
-}
-
 async fn handle_validate(
     State(pipeline): State<Arc<QueryPipeline>>,
     Json(req): Json<ValidateRequest>,
@@ -499,78 +453,6 @@ mod tests {
         assert_eq!(json["error"]["code"], "VALIDATION_FAILED");
     }
 
-    // =============================================
-    // handle_raw_query tests
- // ============================================= - - #[tokio::test] - async fn raw_query_success() { - let router = app(MockExecutor::new(), false); - let body = serde_json::json!({ - "transaction_type": "read", - "query": "match $p isa person;" - }); - let req = json_request("POST", "/query/raw", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - - let json = body_json(resp).await; - assert_eq!(json["status"], "ok"); - } - - #[tokio::test] - async fn raw_query_parse_failure() { - let router = app(MockExecutor::new(), false); - let body = serde_json::json!({ - "transaction_type": "read", - "query": "totally invalid <<>>" - }); - let req = json_request("POST", "/query/raw", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::BAD_REQUEST); - - let json = body_json(resp).await; - assert_eq!(json["error"]["code"], "PARSE_ERROR"); - } - - #[tokio::test] - async fn raw_query_executor_failure() { - let router = app(MockExecutor::failing("raw db error"), false); - let body = serde_json::json!({ - "transaction_type": "read", - "query": "match $p isa person;" - }); - let req = json_request("POST", "/query/raw", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::BAD_REQUEST); - } - - #[tokio::test] - async fn raw_query_bad_json() { - let router = app(MockExecutor::new(), false); - let req = Request::builder() - .method("POST") - .uri("/query/raw") - .header(http::header::CONTENT_TYPE, "application/json") - .body(Body::from("{invalid")) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::BAD_REQUEST); - } - - #[tokio::test] - async fn raw_query_empty_query_parses_ok() { - // Empty query parses to empty clauses, which then compile to empty TypeQL - let router = app(MockExecutor::new(), false); - let body = serde_json::json!({ - "transaction_type": "read", - "query": "" - }); - let req = json_request("POST", "/query/raw", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - } - // ============================================= // handle_validate tests // ============================================= @@ -693,18 +575,6 @@ mod tests { assert_ne!(resp.status(), StatusCode::NOT_FOUND); } - #[tokio::test] - async fn router_raw_query_route_exists() { - let router = app(MockExecutor::new(), false); - let body = serde_json::json!({ - "transaction_type": "read", - "query": "match $p isa person;" - }); - let req = json_request("POST", "/query/raw", body); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - #[tokio::test] async fn router_validate_route_exists() { let router = app(MockExecutor::new(), true); @@ -777,179 +647,4 @@ mod tests { assert!(content_type.to_str().unwrap().contains("application/json")); } - // ============================================= - // CRUD route tests - // ============================================= - - #[tokio::test] - async fn crud_entity_insert_route_exists() { - let router = app(MockExecutor::new(), true); - let body = serde_json::json!({ - "attributes": { - "name": { "value": "Alice", "value_type": "string" }, - "age": { "value": 30, "value_type": "long" } - } - }); - let req = json_request("POST", "/entities/person", body); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - - #[tokio::test] - async fn 
crud_entity_insert_returns_ok() { - let router = app(MockExecutor::new(), true); - let body = serde_json::json!({ - "attributes": { - "name": { "value": "Alice", "value_type": "string" } - } - }); - let req = json_request("POST", "/entities/person", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - - let json = body_json(resp).await; - assert_eq!(json["status"], "ok"); - assert!(json["metadata"]["typeql"].as_str().unwrap().contains("insert")); - } - - #[tokio::test] - async fn crud_entity_insert_unknown_type_fails() { - let router = app(MockExecutor::new(), true); - let body = serde_json::json!({ - "attributes": {} - }); - let req = json_request("POST", "/entities/nonexistent", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::BAD_REQUEST); - } - - #[tokio::test] - async fn crud_entity_fetch_route_exists() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .uri("/entities/person") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - - #[tokio::test] - async fn crud_entity_fetch_returns_ok() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .uri("/entities/person") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - - let json = body_json(resp).await; - assert_eq!(json["status"], "ok"); - } - - #[tokio::test] - async fn crud_entity_get_by_iid_route_exists() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .uri("/entities/person/0xabc123") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - - #[tokio::test] - async fn crud_entity_update_returns_ok() { - let router = app(MockExecutor::new(), true); - let body = serde_json::json!({ - "attributes": { - "age": { "value": 31, "value_type": "long" } - } - }); - let req = json_request("PUT", "/entities/person/0xabc", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - } - - #[tokio::test] - async fn crud_entity_delete_returns_ok() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .method("DELETE") - .uri("/entities/person/0xabc123") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - } - - #[tokio::test] - async fn crud_relation_insert_route_exists() { - let router = app(MockExecutor::new(), true); - let body = serde_json::json!({ - "role_players": [ - { - "role": "employee", - "entity_type": "person", - "iid": "0x1" - } - ], - "attributes": {} - }); - let req = json_request("POST", "/relations/employment", body); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - - #[tokio::test] - async fn crud_relation_delete_route_exists() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .method("DELETE") - .uri("/relations/employment/0xdef") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_ne!(resp.status(), StatusCode::NOT_FOUND); - } - - #[tokio::test] - async fn crud_no_schema_returns_error() { - let router = app(MockExecutor::new(), false); - let body = serde_json::json!({ - "attributes": {} - }); - let req = 
json_request("POST", "/entities/person", body); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR); - } - - #[tokio::test] - async fn crud_entity_fetch_with_limit() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .uri("/entities/person?limit=5") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - assert_eq!(resp.status(), StatusCode::OK); - - let json = body_json(resp).await; - assert!(json["metadata"]["typeql"].as_str().unwrap().contains("limit 5")); - } - - #[tokio::test] - async fn crud_response_has_metadata() { - let router = app(MockExecutor::new(), true); - let req = Request::builder() - .uri("/entities/person") - .body(Body::empty()) - .unwrap(); - let resp = router.oneshot(req).await.unwrap(); - let json = body_json(resp).await; - assert!(json["metadata"]["request_id"].is_string()); - assert!(json["metadata"]["execution_time_ms"].is_number()); - assert!(json["metadata"]["typeql"].is_string()); - } } diff --git a/type-bridge-core/crates/server/src/transport/types.rs b/type-bridge-core/crates/server/src/transport/types.rs index 9a0eeff..a31fbca 100644 --- a/type-bridge-core/crates/server/src/transport/types.rs +++ b/type-bridge-core/crates/server/src/transport/types.rs @@ -12,16 +12,6 @@ pub struct QueryRequest { pub metadata: HashMap, } -/// Request body for POST /query/raw (raw TypeQL). -#[derive(Debug, Deserialize)] -pub struct RawQueryRequest { - pub database: Option, - pub transaction_type: String, - pub query: String, - #[serde(default)] - pub metadata: HashMap, -} - /// Request body for POST /query/validate. #[derive(Debug, Deserialize)] pub struct ValidateRequest { diff --git a/type-bridge-core/crates/server/tests/http_integration_tests.rs b/type-bridge-core/crates/server/tests/http_integration_tests.rs new file mode 100644 index 0000000..7b7ee82 --- /dev/null +++ b/type-bridge-core/crates/server/tests/http_integration_tests.rs @@ -0,0 +1,416 @@ +//! HTTP integration tests using axum's tower::ServiceExt oneshot. +//! +//! Tests the full HTTP request/response flow through the router with +//! various pipeline configurations. 
+
+use std::future::Future;
+use std::pin::Pin;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Arc;
+
+use axum::body::Body;
+use axum::http::{self, Request, StatusCode};
+use axum::Router;
+use http_body_util::BodyExt;
+use tower::ServiceExt;
+
+use type_bridge_core_lib::ast::{Clause, Constraint, LiteralValue, Pattern, Value};
+use type_bridge_server::interceptor::{InterceptError, Interceptor, RequestContext};
+use type_bridge_server::test_helpers::{make_pipeline, make_simple_clauses, MockExecutor};
+use type_bridge_server::transport::http::create_router;
+
+// ── Helpers ──────────────────────────────────────────────────────────
+
+fn app(executor: MockExecutor, with_schema: bool) -> Router {
+    let pipeline = Arc::new(make_pipeline(executor, with_schema));
+    create_router(pipeline)
+}
+
+async fn body_json(response: axum::response::Response) -> serde_json::Value {
+    let bytes = response.into_body().collect().await.unwrap().to_bytes();
+    serde_json::from_slice(&bytes).unwrap()
+}
+
+fn json_request(method: &str, uri: &str, body: serde_json::Value) -> Request<Body> {
+    Request::builder()
+        .method(method)
+        .uri(uri)
+        .header(http::header::CONTENT_TYPE, "application/json")
+        .body(Body::from(serde_json::to_vec(&body).unwrap()))
+        .unwrap()
+}
+
+fn invalid_clause_json() -> serde_json::Value {
+    let clauses = vec![Clause::Match(vec![Pattern::Entity {
+        variable: "p".to_string(),
+        type_name: "person".to_string(),
+        constraints: vec![Constraint::Has {
+            attr_name: "nonexistent".to_string(),
+            value: Value::Literal(LiteralValue {
+                value: serde_json::json!("val"),
+                value_type: "string".to_string(),
+            }),
+        }],
+        is_strict: false,
+    }])];
+    serde_json::to_value(&clauses).unwrap()
+}
+
+// ── Test interceptors ────────────────────────────────────────────────
+
+struct CountingInterceptor {
+    name: String,
+    count: Arc<AtomicUsize>,
+}
+
+impl Interceptor for CountingInterceptor {
+    fn name(&self) -> &str {
+        &self.name
+    }
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.count.fetch_add(1, Ordering::SeqCst);
+            Ok(clauses)
+        })
+    }
+}
+
+struct RejectingInterceptor;
+
+impl Interceptor for RejectingInterceptor {
+    fn name(&self) -> &str {
+        "rejector"
+    }
+    fn on_request<'a>(
+        &'a self,
+        _clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async {
+            Err(InterceptError::AccessDenied {
+                reason: "forbidden".into(),
+            })
+        })
+    }
+}
+
+// ── Health endpoint tests ────────────────────────────────────────────
+
+#[tokio::test]
+async fn health_200() {
+    let router = app(MockExecutor::new(), false);
+    let req = Request::builder()
+        .uri("/health")
+        .body(Body::empty())
+        .unwrap();
+    let resp = router.oneshot(req).await.unwrap();
+    assert_eq!(resp.status(), StatusCode::OK);
+
+    let json = body_json(resp).await;
+    assert_eq!(json["status"], "ok");
+    assert!(!json["version"].as_str().unwrap().is_empty());
+    assert_eq!(json["typedb_connected"], true);
+}
+
+#[tokio::test]
+async fn health_reflects_connection_status() {
+    let executor = MockExecutor::new();
+    *executor.connected.lock().unwrap() = false;
+    let router = app(executor, false);
+    let req = Request::builder()
+        .uri("/health")
+        .body(Body::empty())
+        .unwrap();
+    let resp = router.oneshot(req).await.unwrap();
+    let json = body_json(resp).await;
+    assert_eq!(json["typedb_connected"], false);
+}
+
+// ── Schema endpoint tests ────────────────────────────────────────────
+
+#[tokio::test] +async fn schema_200_with_loaded_schema() { + let router = app(MockExecutor::new(), true); + let req = Request::builder() + .uri("/schema") + .body(Body::empty()) + .unwrap(); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + let json = body_json(resp).await; + assert!(json["entities"].is_object()); +} + +#[tokio::test] +async fn schema_500_without_schema() { + let router = app(MockExecutor::new(), false); + let req = Request::builder() + .uri("/schema") + .body(Body::empty()) + .unwrap(); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR); + + let json = body_json(resp).await; + assert_eq!(json["error"]["code"], "SCHEMA_ERROR"); +} + +// ── Query success tests ────────────────────────────────────────────── + +#[tokio::test] +async fn query_success_200() { + let router = app(MockExecutor::new(), false); + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + let json = body_json(resp).await; + assert_eq!(json["status"], "ok"); +} + +#[tokio::test] +async fn query_with_database_override() { + let executor = MockExecutor::new(); + let calls = executor.calls.clone(); + let pipeline = Arc::new(make_pipeline(executor, false)); + let router = create_router(pipeline); + + let body = serde_json::json!({ + "database": "override_db", + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + router.oneshot(req).await.unwrap(); + + let recorded = calls.lock().unwrap(); + assert_eq!(recorded[0].0, "override_db"); +} + +#[tokio::test] +async fn query_response_metadata_fields() { + let router = app(MockExecutor::new(), false); + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + let json = body_json(resp).await; + + assert!(json["metadata"]["request_id"].is_string()); + assert!(json["metadata"]["execution_time_ms"].is_number()); + assert!(json["metadata"]["interceptors_applied"].is_array()); +} + +// ── Query error tests ──────────────────────────────────────────────── + +#[tokio::test] +async fn query_validation_failure_400() { + let router = app(MockExecutor::new(), true); + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": invalid_clause_json() + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::BAD_REQUEST); + + let json = body_json(resp).await; + assert_eq!(json["error"]["code"], "VALIDATION_FAILED"); +} + +#[tokio::test] +async fn query_executor_failure_400() { + let router = app(MockExecutor::failing("db error"), false); + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::BAD_REQUEST); + + let json = body_json(resp).await; + assert_eq!(json["error"]["code"], "QUERY_EXECUTION_ERROR"); +} + +#[tokio::test] +async fn query_bad_json_400() { + let router = app(MockExecutor::new(), false); + let req = Request::builder() + .method("POST") + .uri("/query") + .header(http::header::CONTENT_TYPE, "application/json") + .body(Body::from("not json")) + 
.unwrap(); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::BAD_REQUEST); +} + +#[tokio::test] +async fn query_interceptor_rejection_403() { + let pipeline = Arc::new( + type_bridge_server::pipeline::PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(RejectingInterceptor) + .build() + .unwrap(), + ); + let router = create_router(pipeline); + + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::FORBIDDEN); + + let json = body_json(resp).await; + assert_eq!(json["error"]["code"], "INTERCEPTOR_ERROR"); +} + +// ── Validate endpoint tests ────────────────────────────────────────── + +#[tokio::test] +async fn validate_valid_200() { + let router = app(MockExecutor::new(), true); + let clauses = serde_json::to_value(make_simple_clauses()).unwrap(); + let body = serde_json::json!({ "clauses": clauses }); + let req = json_request("POST", "/query/validate", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + let json = body_json(resp).await; + assert_eq!(json["is_valid"], true); + assert!(json["errors"].as_array().unwrap().is_empty()); +} + +#[tokio::test] +async fn validate_invalid_with_errors() { + let router = app(MockExecutor::new(), true); + let body = serde_json::json!({ "clauses": invalid_clause_json() }); + let req = json_request("POST", "/query/validate", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + let json = body_json(resp).await; + assert_eq!(json["is_valid"], false); + assert!(!json["errors"].as_array().unwrap().is_empty()); + + // Each error has code, message, path + let error = &json["errors"][0]; + assert!(error["code"].is_string()); + assert!(error["message"].is_string()); + assert!(error["path"].is_string()); +} + +#[tokio::test] +async fn validate_no_schema_500() { + let router = app(MockExecutor::new(), false); + let body = serde_json::json!({ "clauses": [] }); + let req = json_request("POST", "/query/validate", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::INTERNAL_SERVER_ERROR); + + let json = body_json(resp).await; + assert_eq!(json["error"]["code"], "SCHEMA_ERROR"); +} + +// ── Interceptor via HTTP tests ─────────────────────────────────────── + +#[tokio::test] +async fn query_with_counting_interceptor() { + let count = Arc::new(AtomicUsize::new(0)); + let pipeline = Arc::new( + type_bridge_server::pipeline::PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(CountingInterceptor { + name: "counter".into(), + count: count.clone(), + }) + .build() + .unwrap(), + ); + let router = create_router(pipeline); + + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + let json = body_json(resp).await; + assert_eq!(json["metadata"]["interceptors_applied"], serde_json::json!(["counter"])); + assert_eq!(count.load(Ordering::SeqCst), 1); +} + +#[tokio::test] +async fn query_with_audit_interceptor() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = type_bridge_server::config::AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + 
let audit = type_bridge_server::interceptor::audit_log::AuditLogInterceptor::new(&config) + .unwrap(); + + let pipeline = Arc::new( + type_bridge_server::pipeline::PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(audit) + .build() + .unwrap(), + ); + let router = create_router(pipeline); + + let body = serde_json::json!({ + "transaction_type": "read", + "clauses": [] + }); + let req = json_request("POST", "/query", body); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::OK); + + // Verify audit entry was written + let content = std::fs::read_to_string(&path).unwrap(); + let entry: serde_json::Value = serde_json::from_str(content.trim()).unwrap(); + assert_eq!(entry["status"], "ok"); + assert!(entry["request_id"].is_string()); +} + +// ── Routing tests ──────────────────────────────────────────────────── + +#[tokio::test] +async fn unknown_route_404() { + let router = app(MockExecutor::new(), false); + let req = Request::builder() + .uri("/nonexistent") + .body(Body::empty()) + .unwrap(); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::NOT_FOUND); +} + +#[tokio::test] +async fn get_on_query_405() { + let router = app(MockExecutor::new(), false); + let req = Request::builder() + .method("GET") + .uri("/query") + .body(Body::empty()) + .unwrap(); + let resp = router.oneshot(req).await.unwrap(); + assert_eq!(resp.status(), StatusCode::METHOD_NOT_ALLOWED); +} diff --git a/type-bridge-core/crates/server/tests/interceptor_integration_tests.rs b/type-bridge-core/crates/server/tests/interceptor_integration_tests.rs new file mode 100644 index 0000000..66d5e84 --- /dev/null +++ b/type-bridge-core/crates/server/tests/interceptor_integration_tests.rs @@ -0,0 +1,468 @@ +//! Interceptor chain integration tests through the full pipeline. +//! +//! Tests interceptor ordering, metadata propagation, audit log behavior, +//! and multi-interceptor interactions via `QueryPipeline`. 
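+//!
+//! The ordering contract exercised below: `on_request` hooks run in
+//! registration order and `on_response` hooks unwind in reverse, e.g. for
+//! interceptors first/second/third (a sketch of the sequence asserted in
+//! the first test):
+//!
+//! ```text
+//! req:first → req:second → req:third → execute → resp:third → resp:second → resp:first
+//! ```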
+
+use std::collections::HashMap;
+use std::future::Future;
+use std::pin::Pin;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Arc;
+
+use type_bridge_core_lib::ast::Clause;
+use type_bridge_server::config::AuditLogConfig;
+use type_bridge_server::interceptor::audit_log::AuditLogInterceptor;
+use type_bridge_server::interceptor::{InterceptError, Interceptor, RequestContext};
+use type_bridge_server::pipeline::{PipelineBuilder, QueryInput};
+use type_bridge_server::test_helpers::MockExecutor;
+
+// ── Helper interceptors ──────────────────────────────────────────────
+
+struct OrderTrackingInterceptor {
+    name: String,
+    order: Arc<std::sync::Mutex<Vec<String>>>,
+}
+
+impl Interceptor for OrderTrackingInterceptor {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.order
+                .lock()
+                .unwrap()
+                .push(format!("req:{}", self.name));
+            Ok(clauses)
+        })
+    }
+
+    fn on_response<'a>(
+        &'a self,
+        _result: &'a serde_json::Value,
+        _ctx: &'a RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<(), InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.order
+                .lock()
+                .unwrap()
+                .push(format!("resp:{}", self.name));
+            Ok(())
+        })
+    }
+}
+
+struct MetadataWriterInterceptor {
+    key: String,
+    value: serde_json::Value,
+}
+
+impl Interceptor for MetadataWriterInterceptor {
+    fn name(&self) -> &str {
+        "metadata-writer"
+    }
+
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            ctx.metadata.insert(self.key.clone(), self.value.clone());
+            Ok(clauses)
+        })
+    }
+}
+
+struct MetadataReaderInterceptor {
+    key: String,
+    found: Arc<std::sync::Mutex<Option<serde_json::Value>>>,
+}
+
+impl Interceptor for MetadataReaderInterceptor {
+    fn name(&self) -> &str {
+        "metadata-reader"
+    }
+
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            let val = ctx.metadata.get(&self.key).cloned();
+            *self.found.lock().unwrap() = val;
+            Ok(clauses)
+        })
+    }
+}
+
+struct CountingInterceptor {
+    name: String,
+    request_count: Arc<AtomicUsize>,
+    response_count: Arc<AtomicUsize>,
+}
+
+impl CountingInterceptor {
+    fn new(name: &str) -> Self {
+        Self {
+            name: name.to_string(),
+            request_count: Arc::new(AtomicUsize::new(0)),
+            response_count: Arc::new(AtomicUsize::new(0)),
+        }
+    }
+}
+
+impl Interceptor for CountingInterceptor {
+    fn name(&self) -> &str {
+        &self.name
+    }
+
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.request_count.fetch_add(1, Ordering::SeqCst);
+            Ok(clauses)
+        })
+    }
+
+    fn on_response<'a>(
+        &'a self,
+        _result: &'a serde_json::Value,
+        _ctx: &'a RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<(), InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.response_count.fetch_add(1, Ordering::SeqCst);
+            Ok(())
+        })
+    }
+}
+
+struct RejectingRequestInterceptor;
+
+impl Interceptor for RejectingRequestInterceptor {
+    fn name(&self) -> &str {
+        "rejector"
+    }
+
+    fn on_request<'a>(
+        &'a self,
+        _clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async {
+            Err(InterceptError::AccessDenied {
+                reason: "denied".into(),
+            })
+        })
+    }
+}
+
+// ── Helpers ──────────────────────────────────────────────────────────
+
+fn make_query_input() -> QueryInput {
+    QueryInput {
+        database: None,
+        transaction_type: "read".to_string(),
+        clauses: vec![],
+        metadata:
HashMap::new(), + } +} + +// ── Tests: Chain ordering ──────────────────────────────────────────── + +#[tokio::test] +async fn chain_three_interceptors_request_forward_response_reverse() { + let order = Arc::new(std::sync::Mutex::new(Vec::new())); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(OrderTrackingInterceptor { + name: "first".into(), + order: order.clone(), + }) + .with_interceptor(OrderTrackingInterceptor { + name: "second".into(), + order: order.clone(), + }) + .with_interceptor(OrderTrackingInterceptor { + name: "third".into(), + order: order.clone(), + }) + .build() + .unwrap(); + + pipeline.execute_query(make_query_input()).await.unwrap(); + + let calls = order.lock().unwrap(); + assert_eq!( + *calls, + vec![ + "req:first", + "req:second", + "req:third", + "resp:third", + "resp:second", + "resp:first", + ] + ); +} + +#[tokio::test] +async fn first_rejects_others_skipped() { + let second = CountingInterceptor::new("second"); + let third = CountingInterceptor::new("third"); + let second_req = second.request_count.clone(); + let third_req = third.request_count.clone(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(RejectingRequestInterceptor) + .with_interceptor(second) + .with_interceptor(third) + .build() + .unwrap(); + + let result = pipeline.execute_query(make_query_input()).await; + assert!(result.is_err()); + assert_eq!(second_req.load(Ordering::SeqCst), 0); + assert_eq!(third_req.load(Ordering::SeqCst), 0); +} + +#[tokio::test] +async fn middle_rejects_later_skipped_earlier_ran() { + let first = CountingInterceptor::new("first"); + let third = CountingInterceptor::new("third"); + let first_req = first.request_count.clone(); + let third_req = third.request_count.clone(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(first) + .with_interceptor(RejectingRequestInterceptor) + .with_interceptor(third) + .build() + .unwrap(); + + let result = pipeline.execute_query(make_query_input()).await; + assert!(result.is_err()); + assert_eq!(first_req.load(Ordering::SeqCst), 1); // first ran + assert_eq!(third_req.load(Ordering::SeqCst), 0); // third skipped +} + +// ── Tests: Metadata propagation ────────────────────────────────────── + +#[tokio::test] +async fn interceptor_adds_metadata_downstream_sees_it() { + let found = Arc::new(std::sync::Mutex::new(None)); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(MetadataWriterInterceptor { + key: "tenant_id".into(), + value: serde_json::json!("acme-corp"), + }) + .with_interceptor(MetadataReaderInterceptor { + key: "tenant_id".into(), + found: found.clone(), + }) + .build() + .unwrap(); + + pipeline.execute_query(make_query_input()).await.unwrap(); + + let val = found.lock().unwrap().clone(); + assert_eq!(val, Some(serde_json::json!("acme-corp"))); +} + +// ── Tests: Audit log through pipeline ──────────────────────────────── + +#[tokio::test] +async fn audit_log_writes_complete_entry_to_file() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + let audit = AuditLogInterceptor::new(&config).unwrap(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(audit) + .build() + .unwrap(); + + pipeline.execute_query(make_query_input()).await.unwrap(); + + let content = std::fs::read_to_string(&path).unwrap(); + let entry: 
serde_json::Value = serde_json::from_str(content.trim()).unwrap(); + assert_eq!(entry["status"], "ok"); + assert!(entry["request_id"].is_string()); + assert!(entry["database"].is_string()); + assert!(entry["timestamp"].is_string()); +} + +#[tokio::test] +async fn audit_log_appends_multiple_entries() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + let audit = AuditLogInterceptor::new(&config).unwrap(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(audit) + .build() + .unwrap(); + + pipeline.execute_query(make_query_input()).await.unwrap(); + pipeline.execute_query(make_query_input()).await.unwrap(); + pipeline.execute_query(make_query_input()).await.unwrap(); + + let content = std::fs::read_to_string(&path).unwrap(); + let lines: Vec<&str> = content.trim().lines().collect(); + assert_eq!(lines.len(), 3); + + // Each line should be valid JSON + for line in &lines { + let entry: serde_json::Value = serde_json::from_str(line).unwrap(); + assert_eq!(entry["status"], "ok"); + } +} + +#[tokio::test] +async fn audit_log_records_clause_count() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + let audit = AuditLogInterceptor::new(&config).unwrap(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(audit) + .build() + .unwrap(); + + // Execute with 2 clauses + let input = QueryInput { + database: None, + transaction_type: "read".to_string(), + clauses: vec![Clause::Match(vec![]), Clause::Fetch(vec![])], + metadata: HashMap::new(), + }; + pipeline.execute_query(input).await.unwrap(); + + let content = std::fs::read_to_string(&path).unwrap(); + let entry: serde_json::Value = serde_json::from_str(content.trim()).unwrap(); + assert_eq!(entry["clause_count"], 2); +} + +// ── Tests: Pipeline with multiple interceptors ─────────────────────── + +#[tokio::test] +async fn pipeline_with_multiple_interceptors_ordering() { + let count1 = CountingInterceptor::new("counter1"); + let count2 = CountingInterceptor::new("counter2"); + let req1 = count1.request_count.clone(); + let resp1 = count1.response_count.clone(); + let req2 = count2.request_count.clone(); + let resp2 = count2.response_count.clone(); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(count1) + .with_interceptor(count2) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input()).await.unwrap(); + + assert_eq!(req1.load(Ordering::SeqCst), 1); + assert_eq!(req2.load(Ordering::SeqCst), 1); + assert_eq!(resp1.load(Ordering::SeqCst), 1); + assert_eq!(resp2.load(Ordering::SeqCst), 1); + assert_eq!( + output.interceptors_applied, + vec!["counter1", "counter2"] + ); +} + +#[tokio::test] +async fn pipeline_request_metadata_propagates_to_response_interceptors() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + let audit = AuditLogInterceptor::new(&config).unwrap(); + + // Metadata writer runs first, sets a value. + // Audit log runs second, stores clause_count in metadata and writes in on_response. 
+ let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(MetadataWriterInterceptor { + key: "custom_marker".into(), + value: serde_json::json!("test-value"), + }) + .with_interceptor(audit) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input()).await.unwrap(); + + assert_eq!( + output.interceptors_applied, + vec!["metadata-writer", "audit-log"] + ); + + // Audit entry should have been written + let content = std::fs::read_to_string(&path).unwrap(); + let entry: serde_json::Value = serde_json::from_str(content.trim()).unwrap(); + assert_eq!(entry["status"], "ok"); +} + +#[tokio::test] +async fn pipeline_with_audit_interceptor_full_flow() { + let dir = tempfile::tempdir().unwrap(); + let path = dir.path().join("audit.jsonl"); + + let config = AuditLogConfig { + output: "file".into(), + file_path: path.to_str().unwrap().to_string(), + }; + let audit = AuditLogInterceptor::new(&config).unwrap(); + + let executor = MockExecutor::with_result(serde_json::json!({"data": [1, 2, 3]})); + let calls = executor.calls.clone(); + + let pipeline = PipelineBuilder::new(executor) + .with_default_database("test_db") + .with_interceptor(audit) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input()).await.unwrap(); + + // Verify execution happened + assert_eq!(output.results, serde_json::json!({"data": [1, 2, 3]})); + let recorded = calls.lock().unwrap(); + assert_eq!(recorded.len(), 1); + assert_eq!(recorded[0].0, "test_db"); + + // Verify audit log was written + let content = std::fs::read_to_string(&path).unwrap(); + let entry: serde_json::Value = serde_json::from_str(content.trim()).unwrap(); + assert_eq!(entry["database"], "test_db"); + assert_eq!(entry["status"], "ok"); +} diff --git a/type-bridge-core/crates/server/tests/pipeline_integration_tests.rs b/type-bridge-core/crates/server/tests/pipeline_integration_tests.rs new file mode 100644 index 0000000..612ad1c --- /dev/null +++ b/type-bridge-core/crates/server/tests/pipeline_integration_tests.rs @@ -0,0 +1,382 @@ +//! End-to-end pipeline integration tests using MockExecutor. +//! +//! Tests the full validate → intercept → compile → execute → intercept flow +//! from an external test crate perspective. 
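+//!
+//! Pipelines under test are built roughly like this (a sketch; see
+//! `test_helpers::make_pipeline` for the full construction):
+//!
+//! ```ignore
+//! let pipeline = PipelineBuilder::new(MockExecutor::new())
+//!     .with_default_database("test_db")
+//!     .build()
+//!     .unwrap();
+//! ```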
+
+use std::collections::HashMap;
+use std::future::Future;
+use std::pin::Pin;
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::Arc;
+
+use type_bridge_core_lib::ast::{Clause, Constraint, LiteralValue, Pattern, Value};
+use type_bridge_server::error::PipelineError;
+use type_bridge_server::interceptor::{InterceptError, Interceptor, RequestContext};
+use type_bridge_server::pipeline::{PipelineBuilder, QueryInput, ValidateInput};
+use type_bridge_server::schema_source::InMemorySchemaSource;
+use type_bridge_server::test_helpers::{make_pipeline, make_simple_clauses, MockExecutor};
+
+// ── Helper interceptors ──────────────────────────────────────────────
+
+struct PassthroughInterceptor {
+    name: String,
+}
+
+impl Interceptor for PassthroughInterceptor {
+    fn name(&self) -> &str {
+        &self.name
+    }
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move { Ok(clauses) })
+    }
+}
+
+struct CountingInterceptor {
+    name: String,
+    request_count: Arc<AtomicUsize>,
+}
+
+impl Interceptor for CountingInterceptor {
+    fn name(&self) -> &str {
+        &self.name
+    }
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            self.request_count.fetch_add(1, Ordering::SeqCst);
+            Ok(clauses)
+        })
+    }
+}
+
+struct RejectingRequestInterceptor;
+
+impl Interceptor for RejectingRequestInterceptor {
+    fn name(&self) -> &str {
+        "rejector"
+    }
+    fn on_request<'a>(
+        &'a self,
+        _clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async {
+            Err(InterceptError::AccessDenied {
+                reason: "test rejection".into(),
+            })
+        })
+    }
+}
+
+struct RejectingResponseInterceptor;
+
+impl Interceptor for RejectingResponseInterceptor {
+    fn name(&self) -> &str {
+        "resp-rejector"
+    }
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        _ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move { Ok(clauses) })
+    }
+    fn on_response<'a>(
+        &'a self,
+        _result: &'a serde_json::Value,
+        _ctx: &'a RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<(), InterceptError>> + Send + 'a>> {
+        Box::pin(async { Err(InterceptError::Internal("response rejected".into())) })
+    }
+}
+
+struct MetadataInterceptor;
+
+impl Interceptor for MetadataInterceptor {
+    fn name(&self) -> &str {
+        "metadata"
+    }
+    fn on_request<'a>(
+        &'a self,
+        clauses: Vec<Clause>,
+        ctx: &'a mut RequestContext,
+    ) -> Pin<Box<dyn Future<Output = Result<Vec<Clause>, InterceptError>> + Send + 'a>> {
+        Box::pin(async move {
+            ctx.metadata
+                .insert("custom_marker".into(), serde_json::json!("set-by-interceptor"));
+            Ok(clauses)
+        })
+    }
+}
+
+// ── Helpers ──────────────────────────────────────────────────────────
+
+fn make_query_input(clauses: Vec<Clause>) -> QueryInput {
+    QueryInput {
+        database: None,
+        transaction_type: "read".to_string(),
+        clauses,
+        metadata: HashMap::new(),
+    }
+}
+
+fn make_query_input_with_db(clauses: Vec<Clause>, db: &str) -> QueryInput {
+    QueryInput {
+        database: Some(db.to_string()),
+        transaction_type: "read".to_string(),
+        clauses,
+        metadata: HashMap::new(),
+    }
+}
+
+fn make_invalid_clauses() -> Vec<Clause> {
+    vec![Clause::Match(vec![Pattern::Entity {
+        variable: "p".to_string(),
+        type_name: "person".to_string(),
+        constraints: vec![Constraint::Has {
+            attr_name: "nonexistent_attr".to_string(),
+            value: Value::Literal(LiteralValue {
+                value: serde_json::json!("val"),
+                value_type: "string".to_string(),
+            }),
+        }],
+        is_strict: false,
+    }])]
+}
+
+// ── Execute query tests
────────────────────────────────────────────── + +#[tokio::test] +async fn execute_valid_query_returns_results() { + let executor = MockExecutor::with_result(serde_json::json!({"data": [1, 2, 3]})); + let pipeline = make_pipeline(executor, false); + + let output = pipeline.execute_query(make_query_input(vec![])).await.unwrap(); + assert_eq!(output.results, serde_json::json!({"data": [1, 2, 3]})); + assert!(!output.request_id.is_empty()); +} + +#[tokio::test] +async fn execute_with_schema_validation_passes() { + let pipeline = make_pipeline(MockExecutor::new(), true); + let input = make_query_input(make_simple_clauses()); + let result = pipeline.execute_query(input).await; + assert!(result.is_ok()); +} + +#[tokio::test] +async fn execute_with_schema_validation_rejects_invalid() { + let pipeline = make_pipeline(MockExecutor::new(), true); + let input = make_query_input(make_invalid_clauses()); + let result = pipeline.execute_query(input).await; + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(matches!(&err, PipelineError::Validation(_))); +} + +#[tokio::test] +async fn execute_without_schema_skips_validation() { + let pipeline = make_pipeline(MockExecutor::new(), false); + // Invalid clauses should pass because there's no schema to validate against + let input = make_query_input(make_invalid_clauses()); + let result = pipeline.execute_query(input).await; + assert!(result.is_ok()); +} + +#[tokio::test] +async fn skip_validation_allows_invalid_queries() { + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_schema_source(InMemorySchemaSource::new( + type_bridge_server::test_helpers::SIMPLE_SCHEMA, + )) + .with_default_database("test_db") + .with_skip_validation() + .build() + .unwrap(); + + // Schema is loaded but validation is skipped + assert!(pipeline.schema().is_some()); + let input = make_query_input(make_invalid_clauses()); + let result = pipeline.execute_query(input).await; + assert!(result.is_ok()); +} + +#[tokio::test] +async fn skip_validation_schema_still_accessible() { + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_schema_source(InMemorySchemaSource::new( + type_bridge_server::test_helpers::SIMPLE_SCHEMA, + )) + .with_skip_validation() + .build() + .unwrap(); + + let schema = pipeline.schema().unwrap(); + assert!(schema.entities.contains_key("person")); +} + +#[tokio::test] +async fn uses_input_database_when_provided() { + let executor = MockExecutor::new(); + let calls = executor.calls.clone(); + let pipeline = make_pipeline(executor, false); + + let input = make_query_input_with_db(vec![], "custom_db"); + pipeline.execute_query(input).await.unwrap(); + + let recorded = calls.lock().unwrap(); + assert_eq!(recorded[0].0, "custom_db"); +} + +#[tokio::test] +async fn uses_default_database_when_none() { + let executor = MockExecutor::new(); + let calls = executor.calls.clone(); + let pipeline = make_pipeline(executor, false); + + let input = make_query_input(vec![]); + pipeline.execute_query(input).await.unwrap(); + + let recorded = calls.lock().unwrap(); + assert_eq!(recorded[0].0, "test_db"); +} + +// ── Interceptor tests ──────────────────────────────────────────────── + +#[tokio::test] +async fn single_interceptor_modifies_context() { + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(MetadataInterceptor) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input(vec![])).await.unwrap(); + assert_eq!(output.interceptors_applied, vec!["metadata"]); +} + +#[tokio::test] +async 
fn multiple_interceptors_execute_in_order() { + let count1 = Arc::new(AtomicUsize::new(0)); + let count2 = Arc::new(AtomicUsize::new(0)); + + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(CountingInterceptor { + name: "first".into(), + request_count: count1.clone(), + }) + .with_interceptor(CountingInterceptor { + name: "second".into(), + request_count: count2.clone(), + }) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input(vec![])).await.unwrap(); + assert_eq!(output.interceptors_applied, vec!["first", "second"]); + assert_eq!(count1.load(Ordering::SeqCst), 1); + assert_eq!(count2.load(Ordering::SeqCst), 1); +} + +#[tokio::test] +async fn request_interceptor_rejection_prevents_execution() { + let executor = MockExecutor::new(); + let calls = executor.calls.clone(); + + let pipeline = PipelineBuilder::new(executor) + .with_interceptor(RejectingRequestInterceptor) + .build() + .unwrap(); + + let result = pipeline.execute_query(make_query_input(vec![])).await; + assert!(result.is_err()); + assert!(matches!(result.unwrap_err(), PipelineError::Interceptor(_))); + + // Executor should not have been called + let recorded = calls.lock().unwrap(); + assert!(recorded.is_empty()); +} + +#[tokio::test] +async fn response_interceptor_failure_returns_error() { + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(RejectingResponseInterceptor) + .build() + .unwrap(); + + let result = pipeline.execute_query(make_query_input(vec![])).await; + assert!(result.is_err()); + let err = result.unwrap_err(); + assert!(matches!(&err, PipelineError::Interceptor(msg) if msg.contains("response rejected"))); +} + +#[tokio::test] +async fn interceptor_names_included_in_output() { + let pipeline = PipelineBuilder::new(MockExecutor::new()) + .with_interceptor(PassthroughInterceptor { + name: "alpha".into(), + }) + .with_interceptor(PassthroughInterceptor { + name: "beta".into(), + }) + .with_interceptor(PassthroughInterceptor { + name: "gamma".into(), + }) + .build() + .unwrap(); + + let output = pipeline.execute_query(make_query_input(vec![])).await.unwrap(); + assert_eq!( + output.interceptors_applied, + vec!["alpha", "beta", "gamma"] + ); +} + +// ── Executor error tests ───────────────────────────────────────────── + +#[tokio::test] +async fn executor_error_propagates() { + let pipeline = make_pipeline(MockExecutor::failing("database crashed"), false); + let result = pipeline.execute_query(make_query_input(vec![])).await; + let err = result.unwrap_err(); + assert!(matches!(&err, PipelineError::QueryExecution(msg) if msg.contains("database crashed"))); +} + +// ── Validate-only tests ────────────────────────────────────────────── + +#[tokio::test] +async fn validate_only_with_schema_valid() { + let pipeline = make_pipeline(MockExecutor::new(), true); + let input = ValidateInput { + clauses: make_simple_clauses(), + }; + let result = pipeline.validate(&input).unwrap(); + assert!(result.is_valid); + assert!(result.errors.is_empty()); +} + +#[tokio::test] +async fn validate_only_with_schema_invalid() { + let pipeline = make_pipeline(MockExecutor::new(), true); + let input = ValidateInput { + clauses: make_invalid_clauses(), + }; + let result = pipeline.validate(&input).unwrap(); + assert!(!result.is_valid); + assert!(!result.errors.is_empty()); +} + +#[tokio::test] +async fn validate_only_without_schema_errors() { + let pipeline = make_pipeline(MockExecutor::new(), false); + let input = ValidateInput { clauses: vec![] }; + let result = 
pipeline.validate(&input); + assert!(result.is_err()); + assert!(matches!(result.unwrap_err(), PipelineError::Schema(_))); +} diff --git a/type-bridge-core/pyproject.toml b/type-bridge-core/pyproject.toml index 224f1e3..405f734 100644 --- a/type-bridge-core/pyproject.toml +++ b/type-bridge-core/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "maturin" [project] name = "type-bridge-core" -version = "1.4.0" +version = "1.4.1" description = "Rust core for type-bridge ORM" requires-python = ">=3.13" classifiers = [ diff --git a/type_bridge/__init__.py b/type_bridge/__init__.py index 70bb4fd..96e8893 100644 --- a/type_bridge/__init__.py +++ b/type_bridge/__init__.py @@ -20,7 +20,10 @@ Unique, ) from type_bridge.crud import ( + CrudEvent, + CrudHook, EntityNotFoundError, + HookCancelled, KeyAttributeError, NotUniqueError, RelationNotFoundError, @@ -48,7 +51,7 @@ from type_bridge.session import Connection, Database, TransactionContext from type_bridge.typedb_driver import Credentials, TransactionType, TypeDB -__version__ = "1.4.0" +__version__ = "1.4.1" __all__ = [ # Database and session @@ -90,6 +93,10 @@ "QueryBuilder", # CRUD "TypeDBManager", + # Hooks + "CrudEvent", + "CrudHook", + "HookCancelled", # Proxy "ProxyDatabase", "ProxyError", diff --git a/type_bridge/crud/__init__.py b/type_bridge/crud/__init__.py index eb522b0..ffaf2b9 100644 --- a/type_bridge/crud/__init__.py +++ b/type_bridge/crud/__init__.py @@ -13,6 +13,7 @@ NotUniqueError, RelationNotFoundError, ) +from .hooks import CrudEvent, CrudHook, HookCancelled from .strategies import EntityStrategy, ModelStrategy, RelationStrategy from .typedb_manager import GroupByQuery, TypeDBManager, TypeDBQuery @@ -25,6 +26,10 @@ "ModelStrategy", "EntityStrategy", "RelationStrategy", + # Hooks + "CrudEvent", + "CrudHook", + "HookCancelled", # Exceptions "NotFoundError", "EntityNotFoundError", diff --git a/type_bridge/crud/hooks.py b/type_bridge/crud/hooks.py new file mode 100644 index 0000000..ccd2242 --- /dev/null +++ b/type_bridge/crud/hooks.py @@ -0,0 +1,152 @@ +"""Lifecycle hook system for CRUD operations. + +Hooks are duck-typed classes that implement only the methods they care about. +Register them on a manager instance via ``manager.add_hook(hook)``. + +Pre-hooks run in registration order and may raise ``HookCancelled`` to abort. +Post-hooks run in reverse registration order; errors are logged, not propagated. +""" + +from __future__ import annotations + +import logging +from enum import Enum +from typing import TYPE_CHECKING, Any, Protocol, runtime_checkable + +if TYPE_CHECKING: + from type_bridge.models.base import TypeDBType + +logger = logging.getLogger(__name__) + + +class CrudEvent(Enum): + """CRUD lifecycle events.""" + + PRE_INSERT = "pre_insert" + POST_INSERT = "post_insert" + PRE_UPDATE = "pre_update" + POST_UPDATE = "post_update" + PRE_DELETE = "pre_delete" + POST_DELETE = "post_delete" + PRE_PUT = "pre_put" + POST_PUT = "post_put" + + +class HookCancelled(Exception): # noqa: N818 — not an error, a control-flow signal + """Raise in a pre-hook to abort the operation. + + Attributes: + reason: Human-readable explanation. + event: The event that was cancelled (set by HookRunner). + hook: The hook instance that raised the cancellation (set by HookRunner). 
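+
+    Example (a minimal sketch; ``GuardHook`` and ``person_manager`` are
+    illustrative names, not part of the API)::
+
+        class GuardHook:
+            def pre_delete(self, sender, instance):
+                raise HookCancelled("deletes are disabled")
+
+        person_manager.add_hook(GuardHook())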
+ """ + + def __init__( + self, + reason: str = "", + *, + event: CrudEvent | None = None, + hook: Any = None, + ): + self.reason = reason + self.event = event + self.hook = hook + super().__init__(reason) + + +@runtime_checkable +class CrudHook(Protocol): + """Protocol for CRUD lifecycle hooks. + + Implement only the methods you need. All methods are optional — + ``HookRunner`` uses ``hasattr`` / ``getattr`` to discover them. + """ + + def should_run(self, event: CrudEvent, sender: type[TypeDBType]) -> bool: ... + def pre_insert(self, sender: type[TypeDBType], instance: Any) -> None: ... + def post_insert(self, sender: type[TypeDBType], instance: Any) -> None: ... + def pre_update(self, sender: type[TypeDBType], instance: Any) -> None: ... + def post_update(self, sender: type[TypeDBType], instance: Any) -> None: ... + def pre_delete(self, sender: type[TypeDBType], instance: Any) -> None: ... + def post_delete(self, sender: type[TypeDBType], instance: Any) -> None: ... + def pre_put(self, sender: type[TypeDBType], instance: Any) -> None: ... + def post_put(self, sender: type[TypeDBType], instance: Any) -> None: ... + + +class HookRunner: + """Manages hook registration and execution. + + Pre-hooks run in registration order. + Post-hooks run in reverse registration order (middleware unwinding). + """ + + __slots__ = ("_hooks",) + + def __init__(self) -> None: + self._hooks: list[Any] = [] + + @property + def has_hooks(self) -> bool: + """Fast guard — skip all hook logic when the list is empty.""" + return len(self._hooks) > 0 + + def add(self, hook: Any) -> None: + """Register a hook.""" + self._hooks.append(hook) + + def remove(self, hook: Any) -> None: + """Unregister a hook. + + Raises ``ValueError`` if the hook is not registered. + """ + self._hooks.remove(hook) + + def run_pre(self, event: CrudEvent, sender: type, instance: Any) -> None: + """Run pre-hooks in registration order. + + Raises ``HookCancelled`` if any hook cancels the operation. + """ + method_name = event.value # e.g. "pre_insert" + for hook in self._hooks: + if not self._should_run(hook, event, sender): + continue + method = getattr(hook, method_name, None) + if method is not None: + try: + method(sender, instance) + except HookCancelled as exc: + # Enrich with context if not already set + if exc.event is None: + exc.event = event + if exc.hook is None: + exc.hook = hook + raise + + def run_post(self, event: CrudEvent, sender: type, instance: Any) -> None: + """Run post-hooks in reverse registration order. + + Errors are logged but do **not** propagate. + """ + method_name = event.value # e.g. 
"post_insert" + for hook in reversed(self._hooks): + if not self._should_run(hook, event, sender): + continue + method = getattr(hook, method_name, None) + if method is not None: + try: + method(sender, instance) + except Exception: + logger.exception( + "Post-hook %r failed for %s on %s", + hook, + event.value, + sender.__name__, + ) + + @staticmethod + def _should_run(hook: Any, event: CrudEvent, sender: type) -> bool: + """Check if *hook* wants to run for this event/sender combination.""" + should_run_method = getattr(hook, "should_run", None) + if should_run_method is not None: + return should_run_method(event, sender) + return True diff --git a/type_bridge/crud/typedb_manager.py b/type_bridge/crud/typedb_manager.py index ab07924..5eee9da 100644 --- a/type_bridge/crud/typedb_manager.py +++ b/type_bridge/crud/typedb_manager.py @@ -8,11 +8,12 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any, Self, cast from typedb.driver import TransactionType from type_bridge.crud.formatting import format_value +from type_bridge.crud.hooks import CrudEvent, HookRunner from type_bridge.crud.strategies import EntityStrategy, ModelStrategy, RelationStrategy from type_bridge.crud.types import is_multi_value_attribute from type_bridge.models import Entity, Relation @@ -49,6 +50,8 @@ def __init__(self, connection: Connection, model_class: type[T]): self.model_class = model_class self.compiler = QueryCompiler() + self._hook_runner = HookRunner() + # Select strategy if issubclass(model_class, Entity): self.strategy: ModelStrategy = EntityStrategy() @@ -57,6 +60,15 @@ def __init__(self, connection: Connection, model_class: type[T]): else: raise TypeError(f"Unsupported model type: {model_class}") + def add_hook(self, hook: Any) -> Self: + """Register a lifecycle hook. Returns self for chaining.""" + self._hook_runner.add(hook) + return self + + def remove_hook(self, hook: Any) -> None: + """Unregister a lifecycle hook.""" + self._hook_runner.remove(hook) + def _execute(self, query: str, tx_type: TransactionType) -> list[dict[str, Any]]: return self._executor.execute(query, tx_type) @@ -271,6 +283,9 @@ def insert(self, instance: T) -> T: For relations, uses two roundtrips (insert, then fetch) because TypeDB 3.x relation inserts don't bind the variable. 
""" + if self._hook_runner.has_hooks: + self._hook_runner.run_pre(CrudEvent.PRE_INSERT, self.model_class, instance) + var = "$x" # Relations use include_variable=False in to_ast(), so $x isn't bound @@ -283,6 +298,9 @@ def insert(self, instance: T) -> T: query_parts.append(self.compiler.compile(insert_clause)) self._execute("\n".join(query_parts), TransactionType.WRITE) self._fetch_and_set_iid(instance, var) + + if self._hook_runner.has_hooks: + self._hook_runner.run_post(CrudEvent.POST_INSERT, self.model_class, instance) return instance # Entities: Combined insert + fetch IID in single query @@ -294,6 +312,9 @@ def insert(self, instance: T) -> T: # Fallback to separate fetch (for edge cases like types without keys) self._fetch_and_set_iid(instance, var) + if self._hook_runner.has_hooks: + self._hook_runner.run_post(CrudEvent.POST_INSERT, self.model_class, instance) + return instance def _fetch_and_set_iid(self, instance: T, var: str) -> None: @@ -768,6 +789,9 @@ def update(self, instance: T) -> T: Returns: The updated instance """ + if self._hook_runner.has_hooks: + self._hook_runner.run_pre(CrudEvent.PRE_UPDATE, self.model_class, instance) + var = "$x" constraints = self.strategy.identify(instance) all_attrs = self.model_class.get_all_attributes() @@ -876,10 +900,16 @@ def update(self, instance: T) -> T: self._execute(full_query, TransactionType.WRITE) logger.info(f"Updated: {self.model_class.__name__}") + if self._hook_runner.has_hooks: + self._hook_runner.run_post(CrudEvent.POST_UPDATE, self.model_class, instance) + return instance def delete(self, instance: T) -> T: """Delete an instance and return it.""" + if self._hook_runner.has_hooks: + self._hook_runner.run_pre(CrudEvent.PRE_DELETE, self.model_class, instance) + var = "$x" # Build AST-based match clause @@ -900,6 +930,10 @@ def delete(self, instance: T) -> T: query = f"{match_str}\n{delete_str}" self._execute(query, TransactionType.WRITE) + + if self._hook_runner.has_hooks: + self._hook_runner.run_post(CrudEvent.POST_DELETE, self.model_class, instance) + return instance def all(self) -> list[T]: @@ -918,9 +952,19 @@ def insert_many(self, instances: list[T]) -> list[T]: # Check if all instances are entities (can be batched) # Relations need individual handling due to role player match clauses if all(isinstance(inst, Entity) for inst in instances): - return self._batch_insert_entities(instances) + if self._hook_runner.has_hooks: + for instance in instances: + self._hook_runner.run_pre(CrudEvent.PRE_INSERT, self.model_class, instance) - # Fallback for relations or mixed types + result = self._batch_insert_entities(instances) + + if self._hook_runner.has_hooks: + for instance in result: + self._hook_runner.run_post(CrudEvent.POST_INSERT, self.model_class, instance) + + return result + + # Fallback for relations or mixed types (hooks fire via self.insert) for instance in instances: self.insert(instance) return instances @@ -993,6 +1037,9 @@ def put(self, instance: T) -> T: Uses TypeQL's PUT clause for idempotent insertion. For entities, uses a single roundtrip. For relations, uses two. """ + if self._hook_runner.has_hooks: + self._hook_runner.run_pre(CrudEvent.PRE_PUT, self.model_class, instance) + var = "$x" # Relations use include_variable=False in to_ast(), so $x isn't bound. 
@@ -1007,6 +1054,9 @@
             query_parts.append(put_query)
             self._execute("\n".join(query_parts), TransactionType.WRITE)
             self._fetch_and_set_iid(instance, var)
+
+            if self._hook_runner.has_hooks:
+                self._hook_runner.run_post(CrudEvent.POST_PUT, self.model_class, instance)
             return instance
 
         # Entities: Combined put + fetch IID in single query
@@ -1018,6 +1068,9 @@
             # Fallback to separate fetch (for edge cases like types without keys)
             self._fetch_and_set_iid(instance, var)
 
+        if self._hook_runner.has_hooks:
+            self._hook_runner.run_post(CrudEvent.POST_PUT, self.model_class, instance)
+
         return instance
 
     def delete_many(self, instances: list[T], *, strict: bool = False) -> list[T]:
@@ -1049,6 +1102,8 @@
             else:
                 without_iids.append(instance)
 
+        has_hooks = self._hook_runner.has_hooks
+
         # For strict mode, we need to check existence before deleting
         if strict:
             # Batch check existence for instances with IIDs
@@ -1066,10 +1121,16 @@
             # All exist - proceed with batch delete
             deleted: list[T] = []
             if with_iids:
+                if has_hooks:
+                    for inst in with_iids:
+                        self._hook_runner.run_pre(CrudEvent.PRE_DELETE, self.model_class, inst)
                 self._batch_delete_by_iid(with_iids)
+                if has_hooks:
+                    for inst in with_iids:
+                        self._hook_runner.run_post(CrudEvent.POST_DELETE, self.model_class, inst)
                 deleted.extend(with_iids)
             for inst in without_iids:
-                self.delete(inst)
+                self.delete(inst)  # hooks fire inside self.delete()
                 deleted.append(inst)
             return deleted
 
@@ -1082,10 +1143,16 @@
             existing_instances = [inst for inst in with_iids if inst._iid in existing_iids]
 
             if existing_instances:
+                if has_hooks:
+                    for inst in existing_instances:
+                        self._hook_runner.run_pre(CrudEvent.PRE_DELETE, self.model_class, inst)
                 self._batch_delete_by_iid(existing_instances)
+                if has_hooks:
+                    for inst in existing_instances:
+                        self._hook_runner.run_post(CrudEvent.POST_DELETE, self.model_class, inst)
                 deleted.extend(existing_instances)
 
-            # Handle instances without IIDs individually
+            # Handle instances without IIDs individually (hooks fire via self.delete)
             for inst in without_iids:
                 if self._entity_exists(inst):
                     self.delete(inst)
@@ -1240,10 +1307,16 @@ def put_many(self, instances: list[T]) -> list[T]:
         if not instances:
             return instances
 
+        has_hooks = self._hook_runner.has_hooks
+
         # Check if all instances are entities (can attempt batch)
        if all(isinstance(inst, Entity) for inst in instances):
+            if has_hooks:
+                for instance in instances:
+                    self._hook_runner.run_pre(CrudEvent.PRE_PUT, self.model_class, instance)
+
             try:
-                return self._batch_insert_entities(instances, use_put=True)
+                result = self._batch_insert_entities(instances, use_put=True)
             except Exception as e:
                 # Check if this is a key constraint violation
                 error_str = str(e)
@@ -1251,14 +1324,22 @@
                 logger.debug(
                     f"Batch put failed with constraint violation, falling back to individual: {e}"
                 )
-                # Fall back to individual operations
+                # Fall back to individual operations.
+                # Note: pre-hooks may fire again via self.put() — acceptable
+                # since the batch operation was rolled back.
                 for instance in instances:
                     self.put(instance)
                 return instances
             # Re-raise other errors
             raise
 
-        # Fallback for relations or mixed types
+            if has_hooks:
+                for instance in result:
+                    self._hook_runner.run_post(CrudEvent.POST_PUT, self.model_class, instance)
+
+            return result
+
+        # Fallback for relations or mixed types (hooks fire via self.put)
         for instance in instances:
             self.put(instance)
         return instances