diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..78b6f87 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +## 2026-02-13 + +### Changed +- Restored local ownership of `export_cdv`, `get_dataview_folder`, and `list_cdv_files` in `clicknick.views.dataview_editor.cdv_file`. +- Added canonical `read_mdb_csv()` in `clicknick.data.data_source` returning `dict[int, AddressRow]`. + +### Compatibility +- Kept `load_addresses_from_mdb_dump()` as a backward-compatible alias to `read_mdb_csv()`. diff --git a/extract-click-plc-core-plan.md b/extract-click-plc-core-plan.md new file mode 100644 index 0000000..aca0be7 --- /dev/null +++ b/extract-click-plc-core-plan.md @@ -0,0 +1,360 @@ +# pyclickplc Extraction Plan + +Transition plan for extracting shared CLICK PLC format knowledge from ClickNick into a standalone `pyclickplc` package, consumed by both ClickNick (GUI) and pyrung (simulation engine). + +--- + +## Target Package Layout + +``` +pyclickplc/ + __init__.py # re-export public API + banks.py # ADDRESS_RANGES, MEMORY_TYPE_BASES, DataType, defaults + addresses.py # AddressRecord, get_addr_key, parse/format helpers + blocks.py # BlockTag, BlockRange, MemoryBankMeta, parse/compute + nicknames.py # read/write CSV, type code mappings + validation.py # NICKNAME_MAX_LENGTH, FORBIDDEN_CHARS, validate_nickname() +``` + +--- + +## Phase 0: Simplify Block Tags in ClickNick (Before Extraction) - DONE + +Do this work in ClickNick first. Extracting clean code is easier than extracting complex code and simplifying it after. + +### 0a. Enforce unique block names + +- Add validation: on block create/rename, check for duplicate names across all memory types +- Surface error in editor if duplicate detected +- Remove stack-per-`(memory_type, name)` logic from `compute_all_block_ranges` — simplify to plain dict lookup by name +- Remove memory_type scoping from `find_paired_tag_index` — unique names make it unnecessary + +### 0b. Add auto-suffix for T/TD, CT/CTD + +- Editor behavior: when user creates `` on T rows, auto-create `` on TD rows +- When user renames `` → ``, auto-rename `` → `` +- When user deletes ``, auto-delete `` +- `_D` suffix is convention only — pyclickplc treats them as two independent uniquely-named blocks; pairing is recognized by suffix match + +### 0c. Move `compute_all_block_ranges` back to blocktag model + +Move from `block_service.py` to `blocktag.py` (now simplified by unique names): + +- `find_paired_tag_index` — dict scan by name, no depth tracking +- `find_block_range_indices` — thin wrapper +- `validate_block_span` — format constraint +- `compute_all_block_ranges` — simple open/close matching by unique name + +Keep in `BlockService` (ClickNick editor coordination): + +- `update_colors` — mutates AddressStore +- `auto_update_matching_block_tag` — editor auto-sync UX +- `apply_block_tag` — auto-suffix editor behavior +- `compute_block_colors_map` — UI rendering helper + +--- + +## Phase 1: Extract `banks.py` and `addresses.py` + +Lowest risk. Pure data and pure functions with zero behavioral change. 
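The invariant these pure helpers must keep is the `AddrKey` round trip. A minimal sketch of the behavior being moved, taken from the current `constants.py`/`address_row.py`; the `pyclickplc.addresses` import path reflects the target layout above, not published code:

```python
from pyclickplc.addresses import get_addr_key, parse_addr_key

# AddrKey packs the memory-type index into the high byte and the MDB
# address into the low 24 bits, e.g. C100 -> 0x2000000 + 100.
key = get_addr_key("C", 100)
assert key == 0x2000064
assert parse_addr_key(key) == ("C", 100)  # lossless round trip
```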
+ +### banks.py — from `constants.py` + +Move: + +| Source (`constants.py`) | Destination (`banks.py`) | +|-------------------------------|-------------------------------| +| `ADDRESS_RANGES` | `ADDRESS_RANGES` | +| `MEMORY_TYPE_BASES` | `MEMORY_TYPE_BASES` | +| `_INDEX_TO_TYPE` | `_INDEX_TO_TYPE` | +| `DataType` enum | `DataType` | +| `DATA_TYPE_DISPLAY` | `DATA_TYPE_DISPLAY` | +| `DATA_TYPE_HINTS` | `DATA_TYPE_HINTS` | +| `MEMORY_TYPE_TO_DATA_TYPE` | `MEMORY_TYPE_TO_DATA_TYPE` | +| `DEFAULT_RETENTIVE` | `DEFAULT_RETENTIVE` | +| `INTERLEAVED_PAIRS` | `INTERLEAVED_PAIRS` | +| `INTERLEAVED_TYPE_PAIRS` | `INTERLEAVED_TYPE_PAIRS` | +| `BIT_ONLY_TYPES` | `BIT_ONLY_TYPES` | + +Keep in ClickNick `constants.py` (GUI-specific): + +- `NON_EDITABLE_TYPES` +- `PAIRED_RETENTIVE_TYPES` (editor behavior for retentive sync) + +Update ClickNick: `from pyclickplc.banks import ADDRESS_RANGES, DataType, ...` + +### addresses.py — from `address_row.py` + +Move: + +| Source (`address_row.py`) | Destination (`addresses.py`) | +|----------------------------------|----------------------------------| +| `get_addr_key` | `get_addr_key` | +| `parse_addr_key` | `parse_addr_key` | +| `format_address_display` | `format_address_display` | +| `parse_address_display` | `parse_address_display` | +| `normalize_address` | `normalize_address` | +| `is_xd_yd_upper_byte` | `is_xd_yd_upper_byte` | +| `is_xd_yd_hidden_slot` | `is_xd_yd_hidden_slot` | +| `xd_yd_mdb_to_display` | `xd_yd_mdb_to_display` | +| `xd_yd_display_to_mdb` | `xd_yd_display_to_mdb` | + +New in `addresses.py`: + +```python +@dataclass(frozen=True) +class AddressRecord: + """Minimal address representation shared between consumers.""" + memory_type: str + address: int + nickname: str = "" + comment: str = "" + initial_value: str = "" + retentive: bool = False + data_type: int = DataType.BIT + + @property + def addr_key(self) -> int: + return get_addr_key(self.memory_type, self.address) + + @property + def display_address(self) -> str: + return format_address_display(self.memory_type, self.address) +``` + +Keep in ClickNick `address_row.py`: + +- `AddressRow` (adds validation state, GUI display, editor helpers) +- `AddressRow.from_record(r: AddressRecord)` class method (new, adapter) + +--- + +## Phase 2: Extract `blocks.py` + +Move block tag model from ClickNick's `blocktag.py` (post Phase 0 simplification). 
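For orientation before the move table below, this is the comment grammar the block model parses today — a usage sketch of the existing `blocktag.py` behavior, shown against the planned `pyclickplc.blocks` import path:

```python
from pyclickplc.blocks import parse_block_tag

# Opening tag with an optional background colour plus trailing free text.
tag = parse_block_tag('<Motor bg="#FFCDD2"> Valve info')
assert (tag.name, tag.tag_type, tag.bg_color) == ("Motor", "open", "#FFCDD2")
assert tag.remaining_text.strip() == "Valve info"

# Closing and self-closing forms mark the end of a range and a single row.
assert parse_block_tag("</Motor>").tag_type == "close"
assert parse_block_tag("<Spare />").tag_type == "self-closing"
```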
+ +### blocks.py — from `blocktag.py` + +Move: + +| Source (`blocktag.py`) | Destination (`blocks.py`) | +|-------------------------------------|-------------------------------------| +| `HasComment` protocol | `HasComment` | +| `BlockTag` dataclass | `BlockTag` | +| `BlockRange` dataclass | `BlockRange` | +| `parse_block_tag` | `parse_block_tag` | +| `format_block_tag` | `format_block_tag` | +| `extract_block_name` | `extract_block_name` | +| `strip_block_tag` | `strip_block_tag` | +| `get_block_type` | `get_block_type` | +| `is_block_tag` | `is_block_tag` | +| `_extract_bg_attribute` | `_extract_bg_attribute` | +| `_is_valid_tag_name` | `_is_valid_tag_name` | +| `_try_parse_tag_at` | `_try_parse_tag_at` | +| `find_paired_tag_index` | `find_paired_tag_index` | +| `find_block_range_indices` | `find_block_range_indices` | +| `compute_all_block_ranges` | `compute_all_block_ranges` | +| `validate_block_span` | `validate_block_span` | + +New in `blocks.py`: + +```python +@dataclass(frozen=True) +class MemoryBankMeta: + """Metadata for a memory bank discovered from block tags.""" + name: str + memory_type: str + start_address: int # hardware address + end_address: int # hardware address, inclusive + data_type: int + retentive: bool + bg_color: str | None = None + paired_bank: str | None = None # recognized by _D suffix convention + +def extract_bank_metas( + records: list[AddressRecord], +) -> dict[str, MemoryBankMeta]: + """Compute MemoryBankMetas from address records using block tags. + + Discovers block tag pairs, extracts address ranges and types, + recognizes _D suffix pairing for timer/counter banks. + + Returns: + Dict mapping block name to MemoryBankMeta + """ + ... +``` + +Keep in ClickNick `block_service.py`: + +- `BlockService.update_colors` +- `BlockService.auto_update_matching_block_tag` +- `BlockService.apply_block_tag` +- `BlockService.compute_block_colors_map` + +These import parsing functions from `clickplc.blocks` instead of `blocktag.py`. + +--- + +## Phase 3: Extract `nicknames.py` and `validation.py` + +### nicknames.py — from `data_source.py` + +Move: + +| Source (`data_source.py`) | Destination (`nicknames.py`) | +|--------------------------------------|--------------------------------------| +| `CSV_COLUMNS` | `CSV_COLUMNS` | +| `DATA_TYPE_STR_TO_CODE` | `DATA_TYPE_STR_TO_CODE` | +| `DATA_TYPE_CODE_TO_STR` | `DATA_TYPE_CODE_TO_STR` | +| `ADDRESS_PATTERN` | `ADDRESS_PATTERN` | +| `load_addresses_from_mdb_dump` | `read_mdb_csv` (rename) | +| CSV read logic from `CsvDataSource` | `read_csv` | +| CSV write logic from `CsvDataSource` | `write_csv` | +| `convert_mdb_csv_to_user_csv` | `convert_mdb_csv_to_user_csv` | + +Public API: + +```python +def read_csv(path: str) -> dict[int, AddressRecord]: + """Read CLICK user-format CSV. Returns addr_key -> AddressRecord.""" + ... + +def read_mdb_csv(path: str) -> dict[int, AddressRecord]: + """Read CLICK Address.csv (MDB export format). Returns addr_key -> AddressRecord.""" + ... + +def write_csv(path: str, records: Iterable[AddressRecord]) -> int: + """Write user-format CSV. Returns number of records written.""" + ... + +def load_nickname_file(path: str) -> NicknameProject: + """High-level loader: reads CSV, extracts block tags, builds bank metas. + + Returns: + NicknameProject with records, banks, and standalone tags + """ + ... 
+ +@dataclass +class NicknameProject: + records: dict[int, AddressRecord] # all records + banks: dict[str, MemoryBankMeta] # discovered from block tags + tags: dict[str, AddressRecord] # not in any block +``` + +Keep in ClickNick `data_source.py`: + +- `DataSource` ABC +- `CsvDataSource` — becomes thin adapter: + +```python +class CsvDataSource(DataSource): + def load_all_addresses(self) -> dict[int, AddressRow]: + records = clickplc.read_csv(self._csv_path) + return {k: AddressRow.from_record(r) for k, r in records.items()} + + def save_changes(self, rows: Sequence[AddressRow]) -> int: + records = [r.to_record() for r in rows if r.has_content] + return clickplc.write_csv(self._csv_path, records) +``` + +- `MdbDataSource` — unchanged, stays entirely in ClickNick + +### validation.py — from `constants.py` + +Move: + +| Source (`constants.py`) | Destination (`validation.py`) | +|-----------------------------------|-----------------------------------| +| `NICKNAME_MAX_LENGTH` | `NICKNAME_MAX_LENGTH` | +| `COMMENT_MAX_LENGTH` | `COMMENT_MAX_LENGTH` | +| `FORBIDDEN_CHARS` | `FORBIDDEN_CHARS` | +| `RESERVED_NICKNAMES` | `RESERVED_NICKNAMES` | +| `INT_MIN/MAX, INT2_MIN/MAX, ...` | Numeric range constants | + +New: + +```python +def validate_nickname(name: str) -> tuple[bool, str | None]: + """Validate a nickname against CLICK rules. Returns (valid, error).""" + ... + +def validate_initial_value(value: str, data_type: int) -> tuple[bool, str | None]: + """Validate an initial value for the given data type. Returns (valid, error).""" + ... +``` + +--- + +## Phase 4: Update ClickNick Imports + +Mechanical find-and-replace across ClickNick: + +```python +# Before +from ..models.constants import ADDRESS_RANGES, DataType, ... +from ..models.address_row import get_addr_key, parse_addr_key, ... +from ..models.blocktag import parse_block_tag, BlockTag, ... + +# After +from pyclickplc.banks import ADDRESS_RANGES, DataType, ... +from pyclickplc.addresses import get_addr_key, parse_addr_key, AddressRecord +from pyclickplc.blocks import parse_block_tag, BlockTag, ... +from pyclickplc.nicknames import read_csv, write_csv +from pyclickplc.validation import validate_nickname, NICKNAME_MAX_LENGTH, ... +``` + +Delete moved code from ClickNick. What remains in each file: + +| ClickNick file | Remaining content | +|-------------------------|------------------------------------------------------| +| `constants.py` | `NON_EDITABLE_TYPES`, `PAIRED_RETENTIVE_TYPES` | +| `address_row.py` | `AddressRow` (GUI model), `from_record`, `to_record` | +| `blocktag.py` | Empty or deleted (re-exports from pyclickplc if needed)| +| `block_service.py` | `BlockService` (editor coordination only) | +| `data_source.py` | `DataSource` ABC, `CsvDataSource` (thin), `MdbDataSource` | + +--- + +## Phase 5: Wire into pyrung + +With pyclickplc published: + +```python +# pyrung/click/__init__.py +from pyclickplc.banks import ADDRESS_RANGES, DataType, DEFAULT_RETENTIVE +from pyclickplc.addresses import AddressRecord +from pyclickplc.blocks import MemoryBankMeta +from pyclickplc.nicknames import load_nickname_file + +# Pre-built banks constructed from pyclickplc data +X = MemoryBank("X", TagType.BOOL, range(1, ADDRESS_RANGES["X"][1] + 1), retentive=False) +DS = MemoryBank("DS", TagType.INT, range(1, ADDRESS_RANGES["DS"][1] + 1), retentive=True) +# etc. 
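# Illustrative only (not part of the plan's API): the hard-coded retentive
# flags above could instead be looked up from the pyclickplc defaults,
# e.g. for internal relays:
C = MemoryBank("C", TagType.BOOL, range(1, ADDRESS_RANGES["C"][1] + 1),
               retentive=DEFAULT_RETENTIVE["C"])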
+``` + +```python +# pyrung TagMap integration +class TagMap: + @classmethod + def from_nickname_file(cls, path: str) -> "TagMap": + project = load_nickname_file(path) + mapping = {} + for meta in project.banks.values(): + bank = MemoryBank.from_meta(meta) + mapping[bank] = hardware_slice_from_meta(meta) + for name, record in project.tags.items(): + tag = tag_from_record(record) + mapping[tag] = hardware_address_from_record(record) + return cls(mapping) +``` + +--- + +## Future (Not This Plan) + +- Mutation API in pyclickplc: `add_block_tag`, `rename_block_tag`, `delete_block_tag` +- Nickname CSV export from pyrung (round-trip: pyrung → CSV → ClickNick → hardware) +- `from_nickname_file()` nickname export (generate CSV from TagMap) +- Click nickname file format changes (if Automation Direct updates) diff --git a/pyproject.toml b/pyproject.toml index 9aa8c6a..823c739 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,12 +37,12 @@ classifiers = [ # ---- Main dependencies ---- dependencies = [ + "pyclickplc", "pywin32 >= 311", "pyodbc >= 5.3.0", "tksheet == 7.5.19", ] - # ---- Dev dependencies ---- [dependency-groups] diff --git a/scratchpad/dataview-live-plan.md b/scratchpad/dataview-live-plan.md new file mode 100644 index 0000000..1a303be --- /dev/null +++ b/scratchpad/dataview-live-plan.md @@ -0,0 +1,165 @@ +# Plan: ClickNick Dataview Live Integration Requirements + +## Scope + +This plan now covers only ClickNick-side requirements. + +`ModbusService` implementation and service-level batching behavior are planned in: + +- `../pyclickplc/scratchpad/modbus-service-plan.md` + +ClickNick should treat that service as an external dependency and only integrate against its public API. + +## Required pyclickplc API + +ClickNick integration assumes `pyclickplc` provides: + +- `ConnectionState` +- `ModbusService` +- `set_poll_addresses(addresses: Iterable[str])` +- `clear_poll_addresses()` +- `write(values: Mapping[str, PlcValue] | Iterable[tuple[str, PlcValue]])` +- service callbacks for state and values + +## ClickNick Requirements + +### 1. DataviewPanel UI + data flow + +File: `src/clicknick/views/dataview_editor/panel.py` + +- Show 6 columns: + - `Address` + - `Nickname` + - `Comment` + - `New Value` (editable for writable rows only) + - `Write` (checkbox) + - `Live` (read-only) +- Use explicit `DataViewFile` helpers for New Value and Live formatting/parsing: + - `DataViewFile.value_to_display(...)` for UI display text + - `DataViewFile.validate_row_display(...)` for edit validation + - `DataViewFile.try_parse_display(...)` or `DataViewFile.set_row_new_value_from_display(...)` for native value updates +- Keep write checkboxes only on writable, non-empty-address rows. +- Keep `New Value` effectively read-only for non-writable addresses. + +Panel methods required by window integration: + +- `get_poll_addresses() -> list[str]`: + - must return only valid, non-empty, canonical-uppercase addresses + - must deduplicate while preserving row order +- `update_live_values(values: Mapping[str, PlcValue]) -> None` +- `clear_live_values() -> None` +- `get_write_rows() -> list[tuple[str, PlcValue]]` (checked + writable only) +- `get_write_all_rows() -> list[tuple[str, PlcValue]]` (all writable rows with non-empty new value) +- `clear_write_checks() -> None` + +### 2. 
DataviewEditorWindow integration + +File: `src/clicknick/views/dataview_editor/window.py` + +- Add a dedicated `Modbus` toolbar section in the main Dataview window (primary connection UX): + - `Host` input + - `Port` input + - `Connect/Disconnect` toggle button + - connection state indicator text + - optional last-error text +- Add `Connection` menu: + - `Connect` + - `Disconnect` +- Do not add separate connect/disconnect windows; connection controls live in the toolbar. +- Keep `Connection` menu as secondary entry points that call the same toolbar-backed handlers and use current toolbar host/port values. +- Own a service instance: + - `self._modbus: ModbusService | None` +- On connect: + - wire `on_state` and `on_values` callbacks + - marshal callback UI updates with `self.after(0, ...)` + - immediately sync poll addresses from the currently visible tab after successful connect + - if there is no visible tab, call `self._modbus.clear_poll_addresses()` +- On tab change: + - clear `Live` values in the tab being hidden + - replace poll list with newly active tab addresses + - if there is no active tab, call `self._modbus.clear_poll_addresses()` +- On tab close: + - if the closed tab was active, recompute from new active tab and replace poll list + - if no tabs remain, call `self._modbus.clear_poll_addresses()` +- On address edits: + - if edit happened in active tab, replace poll list with active tab addresses + - if edit happened in inactive tab, do not change poll list +- Add toolbar actions: + - `Write Checked`: send `panel.get_write_rows()` + - `Write All`: send `panel.get_write_all_rows()` + - both disabled unless connected + - grouped with Modbus connect controls in the same toolbar region +- On successful write completion: + - call `panel.clear_write_checks()` +- On disconnect: + - call `self._modbus.clear_poll_addresses()` before disconnect (or equivalent stop behavior) + - clear live values in all open panels +- On window close: + - call `self._modbus.disconnect()` before destroy + +### 3. Write behavior requirements + +- `Write Checked` writes only checked writable rows. +- `Write All` writes all writable rows with non-empty `new_value`. +- Non-writable rows must never be included in ClickNick write payloads. +- ClickNick payload should be address + native value only; service owns protocol/write strategy. + +### 4. Poll behavior requirements + +- Poll list is replace-based per active context, not additive subscription. +- Only the currently visible tab is polled. +- Hidden tabs are never polled. +- Active tab fully controls poll list (single source of truth). +- Connect immediately syncs poll list from active tab (no user action required). +- Address edits in active tab refresh poll list; edits in inactive tabs do not. +- Switching away from a tab clears that tab's `Live` values. +- No active tab (for example, last tab closed) must clear poll addresses in service. +- Disconnect clears live display values in all open tabs. + +## ClickNick Test Requirements + +1. Panel behavior +- New Value edit validation and normalization uses `DataViewFile` helpers. +- Write checkbox lifecycle tracks writability/address presence. +- `get_write_rows()` and `get_write_all_rows()` include only writable rows. +- `get_poll_addresses()` returns canonical uppercase, valid-only, de-duplicated addresses. + +2. Window integration +- Mocks `pyclickplc.ModbusService`. +- Verifies Modbus toolbar controls drive connect/disconnect handlers. 
+- Verifies `Connection` menu entries call the same handlers as toolbar controls. +- Verifies connect immediately pushes active tab poll list. +- Verifies tab change replaces poll list and clears hidden tab live values. +- Verifies active-tab address edits call `set_poll_addresses(...)`. +- Verifies inactive-tab address edits do not call `set_poll_addresses(...)`. +- Verifies closing active/last tab clears poll addresses. +- Verifies no-tab state calls `clear_poll_addresses()`. +- Verifies state/value callbacks are marshaled via `after(...)`. +- Verifies write actions call service with expected payloads. +- Verifies connect/disconnect button/menu enablement and cleanup. + +Suggested file: + +- `tests/test_dataview_modbus_integration.py` + +## Files (ClickNick only) + +- `src/clicknick/views/dataview_editor/panel.py` +- `src/clicknick/views/dataview_editor/window.py` +- `tests/test_dataview_modbus_integration.py` + +## Verification (ClickNick) + +1. `make lint` +2. `make test` +3. Manual: + - Open `.cdv` -> 6 columns visible + - Modbus toolbar visible with host/port + connect controls + - Edit `New Value` -> valid normalization + invalid rejection + - Connect from toolbar -> active tab starts live polling immediately + - Tab change -> previous tab live values clear; new active tab values populate + - Active-tab address change -> poll list updates immediately + - Close last tab -> polling stops + - Write Checked -> only checked writable rows written + - Write All -> only writable rows with values written + - Disconnect/close -> live values cleared and service disconnected diff --git a/src/clicknick/app.py b/src/clicknick/app.py index 640aa11..ff99304 100644 --- a/src/clicknick/app.py +++ b/src/clicknick/app.py @@ -271,10 +271,6 @@ def _open_address_editor(self): Multiple windows can be opened and they will share the same data. Changes made in one window are automatically reflected in others. """ - if not self.connected_click_pid: - self._update_status("Connect to a ClickPLC window first", "error") - return - # Check for ODBC drivers (MDB mode requires them) csv_path = self.csv_path_var.get() if not csv_path and not self.nickname_manager.has_access_driver(): @@ -307,10 +303,6 @@ def _open_dataview_editor(self): The dataview editor allows creating and editing CLICK DataView files (.cdv). Only one DataviewEditorWindow can be open at a time. 
""" - if not self.connected_click_pid: - self._update_status("Connect to a ClickPLC window first", "error") - return - # Use the app-level SharedAddressData if self._shared_address_data is None: self._update_status("No data loaded", "error") @@ -324,6 +316,15 @@ def _open_dataview_editor(self): # Get project path from connected Click window project_path = get_project_path_from_hwnd(self.connected_click_hwnd) + # When no CLICK project, use CSV directory for CDV file discovery + csv_fallback_folder = None + if project_path is None: + csv_path = self.csv_path_var.get() + if csv_path: + from pathlib import Path + + csv_fallback_folder = Path(csv_path).parent + # Create or reuse shared dataview data if not hasattr(self, "_dataview_editor_shared_data"): self._dataview_editor_shared_data = None @@ -338,6 +339,7 @@ def _open_dataview_editor(self): self._dataview_editor_shared_data = SharedDataviewData( project_path=project_path, address_store=self._shared_address_data, + dataview_folder=csv_fallback_folder, ) self._dataview_editor_project_path = project_path @@ -384,10 +386,7 @@ def _verify_mdb_and_cdv(self): project_path = get_project_path_from_hwnd(self.connected_click_hwnd) result = run_verification(self._shared_address_data, project_path) - # Check if we have only system nickname issues (no actionable issues) - has_only_system_issues = result.passed and result.system_nickname_issues - - if result.passed and not result.system_nickname_issues: + if result.passed: messagebox.showinfo( "Verification Complete", f"All checks passed!\n\n" @@ -402,77 +401,21 @@ def _verify_mdb_and_cdv(self): dialog.geometry("700x500") dialog.transient(self.root) - # Summary label at top - if has_only_system_issues: - summary = ( - f"No actionable issues found!\n" - f"MDB addresses verified: {result.total_addresses}\n" - f"CDV files verified: {result.cdv_files_checked}" - ) - else: - summary = ( - f"Found {result.total_issues} issue(s)\n" - f"MDB issues: {len(result.mdb_issues)} (of {result.total_addresses} addresses)\n" - f"CDV issues: {len(result.cdv_issues)} (in {result.cdv_files_checked} files)" - ) + summary = ( + f"Found {result.total_issues} issue(s)\n" + f"MDB issues: {len(result.mdb_issues)} (of {result.total_addresses} addresses)\n" + f"CDV issues: {len(result.cdv_issues)} (in {result.cdv_files_checked} files)" + ) ttk.Label(dialog, text=summary, padding=10).pack(fill=tk.X) - # Close button at bottom (pack first with side=BOTTOM) ttk.Button(dialog, text="Close", command=dialog.destroy).pack( side=tk.BOTTOM, pady=(0, 10) ) - # Collapsible section for system nickname issues (pack with side=BOTTOM) - if result.system_nickname_issues: - collapse_frame = ttk.Frame(dialog) - collapse_frame.pack(side=tk.BOTTOM, fill=tk.X, padx=10, pady=(0, 10)) - - # Container for the text area (above the button) - text_container = ttk.Frame(collapse_frame) - - # Track expanded state - is_expanded = tk.BooleanVar(value=False) - system_text_area = None - - def toggle_system_issues(): - nonlocal system_text_area - if is_expanded.get(): - # Collapse - text_container.pack_forget() - toggle_btn.config( - text=f"+ System nicknames starting with _ ({len(result.system_nickname_issues)})" - ) - is_expanded.set(False) - else: - # Expand - pack container above button - text_container.pack(side=tk.TOP, fill=tk.X, pady=(0, 5)) - if system_text_area is None: - system_text_area = scrolledtext.ScrolledText( - text_container, wrap=tk.WORD, width=80, height=8 - ) - system_text_area.insert( - tk.END, "\n".join(result.system_nickname_issues) - ) - 
system_text_area.config(state=tk.DISABLED) - system_text_area.pack(fill=tk.X) - toggle_btn.config( - text=f"- System nicknames starting with _ ({len(result.system_nickname_issues)})" - ) - is_expanded.set(True) - - toggle_btn = ttk.Button( - collapse_frame, - text=f"+ System nicknames starting with _ ({len(result.system_nickname_issues)})", - command=toggle_system_issues, - ) - toggle_btn.pack(side=tk.BOTTOM, anchor=tk.W) - - # Main scrollable text area for actionable issues (fills remaining space) - if result.all_issues: - text_area = scrolledtext.ScrolledText(dialog, wrap=tk.WORD, width=80, height=20) - text_area.pack(fill=tk.BOTH, expand=True, padx=10, pady=(0, 10)) - text_area.insert(tk.END, "\n".join(result.all_issues)) - text_area.config(state=tk.DISABLED) + text_area = scrolledtext.ScrolledText(dialog, wrap=tk.WORD, width=80, height=20) + text_area.pack(fill=tk.BOTH, expand=True, padx=10, pady=(0, 10)) + text_area.insert(tk.END, "\n".join(result.all_issues)) + text_area.config(state=tk.DISABLED) dialog.grab_set() dialog.focus_set() @@ -697,6 +640,13 @@ def _check_odbc_drivers_and_warn(self): return False return True + def _clear_connection_state(self) -> None: + """Clear the currently connected Click window metadata.""" + self.connected_click_pid = None + self.connected_click_filename = None + self.connected_click_hwnd = None + self.using_database = False + def refresh_click_instances(self): """Refresh the list of running Click.exe instances.""" # Remember currently selected instance @@ -712,6 +662,8 @@ def refresh_click_instances(self): click_instances = self.detector.get_click_instances() if not click_instances: + self._clear_connection_state() + self._update_window_title() self._update_status("⚠ No ClickPLC windows", "error") self.start_button.state(["disabled"]) # Disable when no instances return @@ -763,7 +715,18 @@ def load_csv(self): self._update_status("✓ CSV loaded", "connected") self.using_database = False self._update_window_title() - self.start_monitoring() + has_live_connection = bool(self.connected_click_pid and self.connected_click_hwnd) and ( + self.detector.check_window_exists(self.connected_click_pid) + ) + if has_live_connection: + self.start_monitoring() + else: + if self.monitoring: + self.stop_monitoring(update_status=False) + if self.connected_click_pid or self.connected_click_hwnd: + self._clear_connection_state() + self.selected_instance_var.set("") + self._update_window_title() return True except Exception as e: print(f"Error loading CSV: {e}") @@ -876,10 +839,7 @@ def connect_to_instance(self, pid, title, filename, hwnd): self.stop_monitoring() # Reset connection state - self.connected_click_pid = None - self.connected_click_filename = None - self.connected_click_hwnd = None - self.using_database = False + self._clear_connection_state() # Clear CSV path when switching instances self.csv_path_var.set("") @@ -961,14 +921,23 @@ def _handle_window_closed(self): self._update_status("⚠ Connected ClickPLC window closed", "error") self.stop_monitoring(update_status=False) - # Force close any open editor windows (can't save - DB is gone) - if self._shared_address_data is not None: + source_is_mdb = isinstance( + self._shared_data_source_path, str + ) and self._shared_data_source_path.startswith("mdb:") + + # Force close editor windows only for MDB-backed data. 
+ if source_is_mdb and self._shared_address_data is not None: self._shared_address_data.force_close_all_windows() self._shared_address_data = None self._shared_data_source_path = None - # Disconnect NicknameManager from data - self.nickname_manager.set_shared_data(None) + # MDB data is no longer valid once the Click window is gone. + self.nickname_manager.set_shared_data(None) + + # Always clear stale Click connection metadata. + self._clear_connection_state() + self.selected_instance_var.set("") + self._update_window_title() self.root.after(2000, self.refresh_click_instances) diff --git a/src/clicknick/data/address_store.py b/src/clicknick/data/address_store.py index 3b02965..9008056 100644 --- a/src/clicknick/data/address_store.py +++ b/src/clicknick/data/address_store.py @@ -18,15 +18,18 @@ from dataclasses import replace from typing import TYPE_CHECKING -from ..models.address_row import AddressRow, get_addr_key, is_xd_yd_hidden_slot -from ..models.blocktag import parse_block_tag -from ..models.constants import ( - ADDRESS_RANGES, +from pyclickplc.addresses import get_addr_key, is_xd_yd_hidden_slot +from pyclickplc.banks import ( + BANKS, DEFAULT_RETENTIVE, INTERLEAVED_PAIRS, MEMORY_TYPE_TO_DATA_TYPE, DataType, ) +from pyclickplc.blocks import parse_block_tag +from pyclickplc.validation import SYSTEM_NICKNAME_TYPES + +from ..models.address_row import AddressRow from ..models.validation import validate_comment, validate_initial_value, validate_nickname from ..services.block_service import BlockService, compute_all_block_ranges from ..services.nickname_index_service import NicknameIndexService @@ -126,24 +129,31 @@ def _create_base_skeleton(self) -> dict[int, AddressRow]: """ skeleton: dict[int, AddressRow] = {} - for mem_type, (start, end) in ADDRESS_RANGES.items(): + for mem_type, bank in BANKS.items(): default_data_type = MEMORY_TYPE_TO_DATA_TYPE.get(mem_type, DataType.BIT) default_retentive = DEFAULT_RETENTIVE.get(mem_type, False) - for addr in range(start, end + 1): - # Skip hidden XD/YD slots - if is_xd_yd_hidden_slot(mem_type, addr): - continue - - addr_key = get_addr_key(mem_type, addr) - row = AddressRow( - memory_type=mem_type, - address=addr, - data_type=default_data_type, - retentive=default_retentive, - ) - skeleton[addr_key] = row - self.row_order.append(addr_key) + # Use valid_ranges for sparse banks (X/Y), full range for contiguous + if bank.valid_ranges is not None: + ranges = bank.valid_ranges + else: + ranges = ((bank.min_addr, bank.max_addr),) + + for lo, hi in ranges: + for addr in range(lo, hi + 1): + # Skip hidden XD/YD slots + if is_xd_yd_hidden_slot(mem_type, addr): + continue + + addr_key = get_addr_key(mem_type, addr) + row = AddressRow( + memory_type=mem_type, + address=addr, + data_type=default_data_type, + retentive=default_retentive, + ) + skeleton[addr_key] = row + self.row_order.append(addr_key) return skeleton @@ -151,8 +161,13 @@ def _mark_loaded_with_errors(self) -> None: """Mark X/SC/SD rows that loaded with invalid nicknames.""" all_nicks = self.all_nicknames for addr_key, row in self.visible_state.items(): - if row.memory_type in ("X", "SC", "SD") and row.nickname: - is_valid, _ = validate_nickname(row.nickname, all_nicks, addr_key) + if row.memory_type in SYSTEM_NICKNAME_TYPES and row.nickname: + is_valid, _ = validate_nickname( + row.nickname, + all_nicks, + addr_key, + system_bank=row.memory_type, + ) if not is_valid: # Update both base and visible updated = replace(row, loaded_with_error=True) @@ -168,12 +183,35 @@ def _validate_row(self, 
addr_key: int, all_nicknames: dict[int, str] | None = No if not row: return + base_row = self.base_state.get(addr_key) + is_loaded_base_nickname = ( + base_row is not None and row.nickname != "" and base_row.nickname == row.nickname + ) + system_bank: str | None = None + if row.memory_type in SYSTEM_NICKNAME_TYPES: + # System nickname handling is intentionally narrow: only allow + # unchanged loaded values from PLC metadata. + if row.memory_type == "X": + # X system nicknames are auto-generated _IO* signals. + if is_loaded_base_nickname and row.nickname.startswith("_IO"): + system_bank = "X" + else: + # SC/SD system-style names are allowed only when unchanged from load. + if is_loaded_base_nickname: + system_bank = row.memory_type + nickname_valid, nickname_error = validate_nickname( - row.nickname, all_nicknames, addr_key, self.is_duplicate_nickname + row.nickname, + all_nicknames, + addr_key, + self.is_duplicate_nickname, + system_bank=system_bank, ) - # Validate comment - comment_valid, comment_error = validate_comment(row.comment) + # Validate comment (includes block name uniqueness check) + comment_valid, comment_error = validate_comment( + row.comment, self.is_duplicate_block_name, addr_key + ) # Validate initial value init_valid, init_error = validate_initial_value(row.initial_value, row.data_type) @@ -359,7 +397,12 @@ def _apply_cascades(self, session: EditSession) -> None: """Apply automatic syncs (T/TD, block tags) to pending changes.""" def _sync_interleaved_pair(src_row: AddressRow, comment: str) -> None: - """Sync comment to the interleaved pair (e.g. T -> TD).""" + """Sync comment to the interleaved pair (e.g. T -> TD). + + For block tags, applies _D suffix transformation: + - T/CT blocks use base name (e.g., "Pumps") + - TD/CTD blocks use "_D" suffix (e.g., "Pumps_D") + """ if src_row.memory_type not in INTERLEAVED_PAIRS: return @@ -372,7 +415,13 @@ def _sync_interleaved_pair(src_row: AddressRow, comment: str) -> None: paired_row = self.visible_state.get(paired_key) if paired_row: - synced = BlockService.apply_block_tag(comment, paired_row.comment) + # Use suffix-aware sync for block tags + synced = BlockService.apply_block_tag_for_interleaved_pair( + comment, + paired_row.comment, + src_row.memory_type, + paired_type, + ) if synced is not None: session.get_builder(paired_key).comment = synced @@ -473,6 +522,8 @@ def _recompute_visible(self, affected_keys: set[int]) -> None: comment_error=override.comment_error, initial_value_valid=override.initial_value_valid, initial_value_error=override.initial_value_error, + loaded_with_error=base.loaded_with_error + and override.nickname == base.nickname, ) else: self.visible_state[addr_key] = override @@ -830,6 +881,37 @@ def is_duplicate_nickname(self, nickname: str, exclude_addr_key: int) -> bool: """Check if nickname is used by another address.""" return self._nickname_service.is_duplicate(nickname, exclude_addr_key) + def is_duplicate_block_name(self, block_name: str, exclude_addr_key: int) -> bool: + """Check if block name is used by another block definition. + + Block names must be unique across all memory types. A "block definition" + is an opening tag () or self-closing tag (). Closing tags + () are NOT duplicates - they're supposed to match their opening tag. 
+ + Args: + block_name: The block name to check + exclude_addr_key: The addr_key to exclude (the row being validated) + + Returns: + True if block name is already defined by another opening/self-closing tag + """ + # First check: if current row has a closing tag, it's never a duplicate + current_row = self.visible_state.get(exclude_addr_key) + if current_row: + current_tag = parse_block_tag(current_row.comment) + if current_tag.tag_type == "close": + return False # Closing tags are allowed to match their opening tag + + # Only count opening and self-closing tags as block definitions + # Use visible_state directly (works during initial load before unified view exists) + for addr_key, row in self.visible_state.items(): + if addr_key == exclude_addr_key: + continue + tag = parse_block_tag(row.comment) + if tag.name == block_name and tag.tag_type in ("open", "self-closing"): + return True + return False + def update_nickname(self, addr_key: int, old_nickname: str, new_nickname: str) -> None: """Update nickname in the index.""" self._nickname_service.update(addr_key, old_nickname, new_nickname) diff --git a/src/clicknick/data/data_source.py b/src/clicknick/data/data_source.py index 5fb8b22..59c01ee 100644 --- a/src/clicknick/data/data_source.py +++ b/src/clicknick/data/data_source.py @@ -11,13 +11,15 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING -from ..models.address_row import AddressRow, get_addr_key -from ..models.constants import ( - ADDRESS_RANGES, +from pyclickplc.addresses import get_addr_key +from pyclickplc.banks import ( + BANKS, DEFAULT_RETENTIVE, MEMORY_TYPE_BASES, MEMORY_TYPE_TO_DATA_TYPE, ) + +from ..models.address_row import AddressRow from ..utils.mdb_operations import ( MdbConnection, find_click_database, @@ -28,35 +30,15 @@ if TYPE_CHECKING: from collections.abc import Sequence -# CSV column names (matching CLICK software export format) -CSV_COLUMNS = ["Address", "Data Type", "Nickname", "Initial Value", "Retentive", "Address Comment"] - -# Data type string to code mapping -DATA_TYPE_STR_TO_CODE: dict[str, int] = { - "BIT": 0, - "INT": 1, - "INT2": 2, - "FLOAT": 3, - "HEX": 4, - "TXT": 6, - "TEXT": 6, # Alias -} - -# Data type code to string mapping (for saving csv) -DATA_TYPE_CODE_TO_STR: dict[int, str] = { - 0: "BIT", - 1: "INT", - 2: "INT2", - 3: "FLOAT", - 4: "HEX", - 6: "TEXT", # Alias -} +from pyclickplc.nicknames import CSV_COLUMNS as CSV_COLUMNS +from pyclickplc.nicknames import DATA_TYPE_CODE_TO_STR as DATA_TYPE_CODE_TO_STR +from pyclickplc.nicknames import DATA_TYPE_STR_TO_CODE as DATA_TYPE_STR_TO_CODE # Regex for parsing address strings like "X001", "C100", "DS1000", "TD5" ADDRESS_PATTERN = re.compile(r"^([A-Z]+)(\d+)$") -def load_addresses_from_mdb_dump(csv_path: str) -> dict[int, AddressRow]: +def read_mdb_csv(csv_path: str) -> dict[int, AddressRow]: """Load addresses from MDB-format CSV (CLICK Address.csv export). 
The CLICK software exports Address.csv in MDB format with columns: @@ -81,7 +63,7 @@ def load_addresses_from_mdb_dump(csv_path: str) -> dict[int, AddressRow]: continue mem_type = row.get("MemoryType", "").strip() - if mem_type not in ADDRESS_RANGES: + if mem_type not in BANKS: continue try: @@ -120,6 +102,11 @@ def load_addresses_from_mdb_dump(csv_path: str) -> dict[int, AddressRow]: return result +def load_addresses_from_mdb_dump(csv_path: str) -> dict[int, AddressRow]: + """Backward-compatible alias for read_mdb_csv().""" + return read_mdb_csv(csv_path) + + class DataSource(ABC): """Abstract base class for address data sources. @@ -234,7 +221,7 @@ def load_all_addresses(self) -> dict[int, AddressRow]: mem_type, address = parsed # Skip if memory type not recognized - if mem_type not in ADDRESS_RANGES: + if mem_type not in BANKS: continue # Get data type (default based on memory type) @@ -346,7 +333,7 @@ def convert_mdb_csv_to_user_csv(source_path: str, dest_path: str) -> None: dest_path: Path to write user-format CSV """ # Load as AddressRows - addresses = load_addresses_from_mdb_dump(source_path) + addresses = read_mdb_csv(source_path) # Save using CsvDataSource (reuses existing save logic) csv_source = CsvDataSource(dest_path) diff --git a/src/clicknick/data/nickname_manager.py b/src/clicknick/data/nickname_manager.py index 269ffbb..21eef8e 100644 --- a/src/clicknick/data/nickname_manager.py +++ b/src/clicknick/data/nickname_manager.py @@ -2,7 +2,8 @@ from typing import TYPE_CHECKING -from ..models.blocktag import strip_block_tag +from pyclickplc.blocks import strip_block_tag + from ..models.nickname import Nickname from ..utils.filters import ContainsFilter, ContainsPlusFilter, NoneFilter, PrefixFilter from ..utils.mdb_shared import has_access_driver diff --git a/src/clicknick/data/shared_dataview.py b/src/clicknick/data/shared_dataview.py index 32c715a..561f2d0 100644 --- a/src/clicknick/data/shared_dataview.py +++ b/src/clicknick/data/shared_dataview.py @@ -9,13 +9,14 @@ from pathlib import Path from typing import TYPE_CHECKING -from ..models.address_row import ( +from pyclickplc.addresses import ( get_addr_key, - parse_address_display, + parse_address, ) -from ..models.address_row import ( +from pyclickplc.addresses import ( normalize_address as _normalize_address, ) + from ..views.dataview_editor.cdv_file import get_dataview_folder, list_cdv_files if TYPE_CHECKING: @@ -36,22 +37,24 @@ def __init__( self, project_path: Path | None = None, address_store: AddressStore | None = None, + dataview_folder: Path | None = None, ): """Initialize the shared dataview data. Args: project_path: Path to the CLICK project folder address_store: AddressStore for nickname lookups + dataview_folder: Explicit DataView folder override (e.g., CSV directory) """ self._project_path = project_path self._store: AddressStore | None = None - self._dataview_folder: Path | None = None + self._dataview_folder: Path | None = dataview_folder # Single window tracking (only one dataview editor at a time) self._window = None - # Find dataview folder - if project_path: + # Find dataview folder from project if not explicitly provided + if self._dataview_folder is None and project_path: self._dataview_folder = get_dataview_folder(project_path) # Wire up to AddressStore if provided @@ -117,7 +120,7 @@ def lookup_nickname(self, address: str) -> tuple[str, str] | None: Tuple of (nickname, comment) or None if not found. 
""" if self._store: - parsed = parse_address_display(address) + parsed = parse_address(address) if not parsed: return None diff --git a/src/clicknick/models/address_row.py b/src/clicknick/models/address_row.py index 1a10f02..2221d48 100644 --- a/src/clicknick/models/address_row.py +++ b/src/clicknick/models/address_row.py @@ -1,223 +1,24 @@ """Data model for the Address Editor. -Contains AddressRow frozen dataclass, validation functions, and AddrKey calculations. +Contains AddressRow frozen dataclass with UI validation state. +Address helpers and constants are imported from pyclickplc. """ from __future__ import annotations from dataclasses import dataclass, field -from .constants import ( - _INDEX_TO_TYPE, +from pyclickplc.addresses import ( + format_address_display, + get_addr_key, +) +from pyclickplc.banks import ( DATA_TYPE_DISPLAY, DEFAULT_RETENTIVE, - MEMORY_TYPE_BASES, NON_EDITABLE_TYPES, DataType, ) -# ============================================================================== -# Helper Functions -# ============================================================================== - - -def get_addr_key(memory_type: str, address: int) -> int: - """Calculate AddrKey from memory type and MDB address. - - Args: - memory_type: The memory type (X, Y, C, etc.) - address: The MDB address number - - Returns: - The AddrKey value used as primary key in MDB - - Raises: - KeyError: If memory_type is not recognized - """ - return MEMORY_TYPE_BASES[memory_type] + address - - -def parse_addr_key(addr_key: int) -> tuple[str, int]: - """Parse an AddrKey back to memory type and MDB address. - - Args: - addr_key: The AddrKey value from MDB - - Returns: - Tuple of (memory_type, mdb_address) - - Raises: - KeyError: If the type index is not recognized - """ - type_index = addr_key >> 24 - address = addr_key & 0xFFFFFF - return _INDEX_TO_TYPE[type_index], address - - -def is_xd_yd_upper_byte(memory_type: str, mdb_address: int) -> bool: - """Check if an XD/YD MDB address is for an upper byte (only XD0u/YD0u at MDB 1). - - XD/YD structure: - - MDB 0 = XD0 (lower), MDB 1 = XD0u (upper) - only slot 0 has upper byte variant - - MDB 2 = XD1, MDB 4 = XD2, ... MDB 16 = XD8 (no upper byte variants displayed) - - Args: - memory_type: The memory type (XD or YD) - mdb_address: The MDB address number - - Returns: - True if this is XD0u/YD0u (MDB address 1) - """ - if memory_type in ("XD", "YD"): - return mdb_address == 1 - return False - - -def is_xd_yd_hidden_slot(memory_type: str, mdb_address: int) -> bool: - """Check if an XD/YD MDB address is a hidden slot (odd addresses > 1). - - These are the upper byte slots for XD1-8/YD1-8 that aren't displayed. - - Args: - memory_type: The memory type (XD or YD) - mdb_address: The MDB address number - - Returns: - True if this slot should be hidden (odd addresses >= 3) - """ - if memory_type in ("XD", "YD"): - return mdb_address >= 3 and mdb_address % 2 == 1 - return False - - -def xd_yd_mdb_to_display(mdb_address: int) -> int: - """Convert XD/YD MDB address to display address number. - - XD/YD structure: - - MDB 0 -> 0 (XD0) - - MDB 1 -> 0 (XD0u) - - MDB 2 -> 1 (XD1), MDB 4 -> 2 (XD2), ... MDB 16 -> 8 (XD8) - - Args: - mdb_address: The MDB address (0, 1, 2, 4, 6, ...) - - Returns: - The display address number (0, 0, 1, 2, 3, ...) - """ - if mdb_address <= 1: - return 0 - return mdb_address // 2 - - -def xd_yd_display_to_mdb(display_addr: int, upper_byte: bool = False) -> int: - """Convert XD/YD display address to MDB address. 
- - Args: - display_addr: The display address (0-8) - upper_byte: True for XD0u/YD0u (only valid for display_addr=0) - - Returns: - The MDB address - """ - if display_addr == 0: - return 1 if upper_byte else 0 - return display_addr * 2 - - -def format_address_display(memory_type: str, mdb_address: int) -> str: - """Format a memory type and address as a display string. - - For XD/YD: - - MDB 0 -> "XD0", MDB 1 -> "XD0u" - - MDB 2 -> "XD1", MDB 4 -> "XD2", ... MDB 16 -> "XD8" - - Odd addresses >= 3 are hidden slots (returns "XDn?" to indicate) - - For X/Y (bits): - - 3-digit zero-padded: X001, Y001, X816, Y816 - - Args: - memory_type: The memory type (X, XD, DS, etc.) - mdb_address: The MDB address number - - Returns: - Formatted address string (e.g., "X001", "XD0", "XD0u", "DS100") - """ - if memory_type in ("XD", "YD"): - if mdb_address == 0: - return f"{memory_type}0" - elif mdb_address == 1: - return f"{memory_type}0u" - else: - display_addr = mdb_address // 2 - return f"{memory_type}{display_addr}" - if memory_type in ("X", "Y"): - return f"{memory_type}{mdb_address:03d}" - return f"{memory_type}{mdb_address}" - - -def parse_address_display(address_str: str) -> tuple[str, int] | None: - """Parse a display address string to memory type and MDB address. - - Handles XD/YD: "XD0u" -> ("XD", 1), "XD1" -> ("XD", 2), "XD8" -> ("XD", 16) - - Args: - address_str: Address string like "X001", "XD0", "XD0u", "XD8" - - Returns: - Tuple of (memory_type, mdb_address) or None if invalid - """ - import re - - if not address_str: - return None - - address_str = address_str.strip().upper() - - # Match pattern: letters followed by digits, optionally ending with 'U' - match = re.match(r"^([A-Z]+)(\d+)(U?)$", address_str) - if not match: - return None - - memory_type = match.group(1) - display_addr = int(match.group(2)) - is_upper = match.group(3) == "U" - - if memory_type not in MEMORY_TYPE_BASES: - return None - - if memory_type in ("XD", "YD"): - # Only XD0/YD0 can have 'u' suffix - if is_upper and display_addr != 0: - return None # Invalid: XD1u, XD2u, etc. don't exist - return memory_type, xd_yd_display_to_mdb(display_addr, is_upper) - - # For non-XD/YD, display address equals MDB address - return memory_type, display_addr - - -def normalize_address(address: str) -> str | None: - """Normalize an address string to its canonical display form. - - Parses the input address and returns the properly formatted display address - (e.g., "x1" -> "X001", "xd0u" -> "XD0u"). - - Args: - address: The address string to normalize (e.g., "x1", "XD0U") - - Returns: - The normalized display address, or None if address is invalid. - """ - parsed = parse_address_display(address) - if not parsed: - return None - memory_type, mdb_address = parsed - return format_address_display(memory_type, mdb_address) - - -# ============================================================================== -# Main Data Model -# ============================================================================== - @dataclass(frozen=True) class AddressRow: @@ -365,9 +166,9 @@ def is_empty(self) -> bool: @property def should_ignore_validation_error(self) -> bool: - """True if validation errors should be ignored (SC/SD loaded with invalid data). + """True if validation errors should be ignored for loaded system nicknames. - SC/SD addresses often have system-preset nicknames that violate validation rules. + Some SC/SD/X addresses can have PLC-generated nicknames with non-user format. 
If the row was loaded with errors, we ignore the errors for display purposes. """ return self.loaded_with_error diff --git a/src/clicknick/models/blocktag.py b/src/clicknick/models/blocktag.py index ca1082b..74c95bb 100644 --- a/src/clicknick/models/blocktag.py +++ b/src/clicknick/models/blocktag.py @@ -1,273 +1,17 @@ -"""Data model for Block tags. - -Contains BlockTag dataclass, parsing functions, and block matching utilities. -""" - -from __future__ import annotations - -from dataclasses import dataclass -from typing import TYPE_CHECKING, Literal, Protocol - -if TYPE_CHECKING: - pass - - -class HasComment(Protocol): - """Protocol for objects that have a comment and optional memory_type attribute.""" - - comment: str - memory_type: str | None - - -@dataclass -class BlockRange: - """A matched block range with start/end indices and metadata. - - Represents a complete block from opening to closing tag (or self-closing). - """ - - start_idx: int - end_idx: int # Same as start_idx for self-closing tags - name: str - bg_color: str | None - memory_type: str | None = None # Memory type for filtering in interleaved views - - -@dataclass -class BlockTag: - """Result of parsing a block tag from a comment. - - Block tags mark sections in the Address Editor: - - - opening tag for a range - - - closing tag for a range - - - self-closing tag for a singular point - - - opening tag with background color - """ - - name: str | None - tag_type: Literal["open", "close", "self-closing"] | None - remaining_text: str - bg_color: str | None - - -def _extract_bg_attribute(tag_content: str) -> tuple[str, str | None]: - """Extract bg attribute from tag content. - - Args: - tag_content: The content between < and > (e.g., 'Name bg="#FFCDD2"') - - Returns: - Tuple of (name_part, bg_color) - - name_part: Tag content with bg attribute removed - - bg_color: The color value, or None if not present - """ - import re - - # Look for bg="..." or bg='...' - match = re.search(r'\s+bg=["\']([^"\']+)["\']', tag_content) - if match: - bg_color = match.group(1) - # Remove the bg attribute from the tag content - name_part = tag_content[: match.start()] + tag_content[match.end() :] - return name_part.strip(), bg_color - return tag_content, None - - -def _is_valid_tag_name(name: str) -> bool: - """Check if a tag name is valid (not a mathematical expression). - - Valid names must contain at least one letter. This prevents expressions - like '< 5 >' or '< 10 >' from being parsed as tags. - - Args: - name: The potential tag name to check - - Returns: - True if the name contains at least one letter - """ - return any(c.isalpha() for c in name) - - -def _try_parse_tag_at(comment: str, start_pos: int) -> BlockTag | None: - """Try to parse a block tag starting at the given position. 
- - Args: - comment: The full comment string - start_pos: Position of the '<' character - - Returns: - BlockTag if valid tag found, None otherwise - """ - end = comment.find(">", start_pos) - if end == -1: - return None - - tag_content = comment[start_pos + 1 : end] # content between < and > - - # Empty tag <> is invalid - if not tag_content or not tag_content.strip(): - return None - - # Calculate remaining text (text before + text after the tag) - text_before = comment[:start_pos] - text_after = comment[end + 1 :] - remaining = text_before + text_after - - # Self-closing: or - if tag_content.rstrip().endswith("/"): - content_without_slash = tag_content.rstrip()[:-1].strip() - name_part, bg_color = _extract_bg_attribute(content_without_slash) - name = name_part.strip() - if name and _is_valid_tag_name(name): - return BlockTag(name, "self-closing", remaining, bg_color) - return None - - # Closing: (no bg attribute on closing tags) - if tag_content.startswith("/"): - name = tag_content[1:].strip() - if name and _is_valid_tag_name(name): - return BlockTag(name, "close", remaining, None) - return None - - # Opening: or - name_part, bg_color = _extract_bg_attribute(tag_content) - name = name_part.strip() - if name and _is_valid_tag_name(name): - return BlockTag(name, "open", remaining, bg_color) - - return None - - -def parse_block_tag(comment: str) -> BlockTag: - """Parse block tag from anywhere in a comment. - - Block tags mark sections in the Address Editor: - - - opening tag for a range (can have text before/after) - - - closing tag for a range (can have text before/after) - - - self-closing tag for a singular point - - - opening tag with background color - - - self-closing tag with background color - - The function searches for tags anywhere in the comment, not just at the start. - Mathematical expressions like '< 5 >' are not parsed as tags. - - Args: - comment: The comment string to parse - - Returns: - BlockTag with name, tag_type, remaining_text, and bg_color - """ - if not comment: - return BlockTag(None, None, "", None) - - # Search for '<' anywhere in the comment - pos = 0 - while True: - start_pos = comment.find("<", pos) - if start_pos == -1: - break - - result = _try_parse_tag_at(comment, start_pos) - if result is not None: - return result - - # Try next '<' character - pos = start_pos + 1 - - return BlockTag(None, None, comment, None) - - -def get_block_type(comment: str) -> str | None: - """Determine the type of block tag in a comment. - - Args: - comment: The comment string to check - - Returns: - 'open' for , 'close' for , - 'self-closing' for , or None if not a block tag - """ - return parse_block_tag(comment).tag_type - - -def is_block_tag(comment: str) -> bool: - """Check if a comment starts with a block tag (any type). - - Block tags mark sections in the Address Editor: - - - opening tag for a range - - - closing tag for a range - - - self-closing tag for a singular point - - Args: - comment: The comment string to check - - Returns: - True if the comment starts with any type of block tag - """ - return get_block_type(comment) is not None - - -def extract_block_name(comment: str) -> str | None: - """Extract block name from a comment that starts with a block tag. - - Args: - comment: The comment string (e.g., "Valve info", "", "") - - Returns: - The block name (e.g., "Motor", "Spare"), or None if no tag - """ - return parse_block_tag(comment).name - - -def strip_block_tag(comment: str) -> str: - """Strip block tag from a comment, returning any text after the tag. 
- - Args: - comment: The comment string (e.g., "Valve info") - - Returns: - Text after the tag (e.g., "Valve info"), or original if no tag - """ - if not comment: - return "" - block_tag = parse_block_tag(comment) - if block_tag.tag_type is not None: - return block_tag.remaining_text - return comment - - -def format_block_tag( - name: str, - tag_type: Literal["open", "close", "self-closing"], - bg_color: str | None = None, -) -> str: - """Format a block tag string from its components. - - Args: - name: The block name (e.g., "Motor", "Alarms") - tag_type: "open", "close", or "self-closing" - bg_color: Optional background color (e.g., "#FFCDD2", "Red") - - Returns: - Formatted block tag string: - - open: "" or "" - - close: "" - - self-closing: "" or "" - """ - bg_attr = f' bg="{bg_color}"' if bg_color else "" - - if tag_type == "self-closing": - return f"<{name}{bg_attr} />" - elif tag_type == "close": - return f"" - else: # open - return f"<{name}{bg_attr}>" - - -# ============================================================================= -# Multi-Row Block Operations -# ============================================================================= -# NOTE: Multi-row block operations (find_paired_tag_index, find_block_range_indices, -# compute_all_block_ranges, validate_block_span) have been moved to -# services/block_service.py to maintain separation of concerns. -# This module now contains only single-comment parsing operations. +"""Block tag model — re-exports from pyclickplc.blocks.""" + +from pyclickplc.blocks import BlockRange as BlockRange +from pyclickplc.blocks import BlockTag as BlockTag +from pyclickplc.blocks import HasComment as HasComment +from pyclickplc.blocks import compute_all_block_ranges as compute_all_block_ranges +from pyclickplc.blocks import extract_block_name as extract_block_name +from pyclickplc.blocks import find_block_range_indices as find_block_range_indices +from pyclickplc.blocks import find_paired_tag_index as find_paired_tag_index +from pyclickplc.blocks import format_block_tag as format_block_tag +from pyclickplc.blocks import get_all_block_names as get_all_block_names +from pyclickplc.blocks import get_block_type as get_block_type +from pyclickplc.blocks import is_block_name_available as is_block_name_available +from pyclickplc.blocks import is_block_tag as is_block_tag +from pyclickplc.blocks import parse_block_tag as parse_block_tag +from pyclickplc.blocks import strip_block_tag as strip_block_tag +from pyclickplc.blocks import validate_block_span as validate_block_span diff --git a/src/clicknick/models/constants.py b/src/clicknick/models/constants.py deleted file mode 100644 index 33cf880..0000000 --- a/src/clicknick/models/constants.py +++ /dev/null @@ -1,180 +0,0 @@ -# ============================================================================== -# Address Memory Map Configuration -# ============================================================================== - -# Address ranges by memory type (start, end inclusive) - these are MDB addresses -# For XD/YD: MDB 0=XD0, 1=XD0u, 2=XD1, 4=XD2, 6=XD3, 8=XD4, 10=XD5, 12=XD6, 14=XD7, 16=XD8 -# Hidden slots (3,5,7,9,11,13,15) are upper bytes for XD1-8 that aren't displayed -from enum import IntEnum - -ADDRESS_RANGES: dict[str, tuple[int, int]] = { - "X": (1, 816), # Inputs - "Y": (1, 816), # Outputs - "C": (1, 2000), # Internal relays - "T": (1, 500), # Timers - "CT": (1, 250), # Counters - "SC": (1, 1000), # Special relays - "DS": (1, 4500), # Data registers (16-bit) - "DD": (1, 1000), # Double data 
registers (32-bit) - "DH": (1, 500), # Hex data registers - "DF": (1, 500), # Float data registers - "XD": (0, 16), # Input groups: XD0, XD0u, XD1-XD8 (MDB 0-16, skip odd > 1) - "YD": (0, 16), # Output groups: YD0, YD0u, YD1-YD8 (MDB 0-16, skip odd > 1) - "TD": (1, 500), # Timer data - "CTD": (1, 250), # Counter data - "SD": (1, 1000), # Special data registers - "TXT": (1, 1000), # Text registers -} -# Base values for AddrKey calculation (Primary Key in MDB) -MEMORY_TYPE_BASES: dict[str, int] = { - "X": 0x0000000, - "Y": 0x1000000, - "C": 0x2000000, - "T": 0x3000000, - "CT": 0x4000000, - "SC": 0x5000000, - "DS": 0x6000000, - "DD": 0x7000000, - "DH": 0x8000000, - "DF": 0x9000000, - "XD": 0xA000000, - "YD": 0xB000000, - "TD": 0xC000000, - "CTD": 0xD000000, - "SD": 0xE000000, - "TXT": 0xF000000, -} -# Reverse mapping: type_index -> memory_type -_INDEX_TO_TYPE: dict[int, str] = {v >> 24: k for k, v in MEMORY_TYPE_BASES.items()} - - -# ============================================================================== -# Data Type Configuration -# ============================================================================== - - -class DataType(IntEnum): - """DataType mapping from MDB database.""" - - BIT = 0 # C, CT, SC, T, X, Y - values: "0" or "1" - INT = 1 # DS, SD, TD - 16-bit signed: -32768 to 32767 - INT2 = 2 # CTD, DD - 32-bit signed: -2147483648 to 2147483647 - FLOAT = 3 # DF - float: -3.4028235E+38 to 3.4028235E+38 - HEX = 4 # DH, XD, YD - hex string: "0000" to "FFFF" - TXT = 6 # TXT - single ASCII character - - -# Display names for DataType values -DATA_TYPE_DISPLAY: dict[int, str] = { - DataType.BIT: "BIT", - DataType.INT: "INT", - DataType.INT2: "INT2", - DataType.FLOAT: "FLOAT", - DataType.HEX: "HEX", - DataType.TXT: "TXT", -} - -# Hint text for initial value fields by DataType -DATA_TYPE_HINTS: dict[int, str] = { - DataType.BIT: "0 or 1 (checkbox)", - DataType.INT: "Range: `-32768` to `32767`", - DataType.INT2: "Range: `-2147483648` to `2147483647`", - DataType.FLOAT: "Range: `-3.4028235E+38` to `3.4028235E+38`", - DataType.HEX: "Range: '0000' to 'FFFF'", - DataType.TXT: "Single ASCII char: eg 'A'", -} - -# Memory types that are exclusively BIT type (no combined types like T/TD) -BIT_ONLY_TYPES: frozenset[str] = frozenset({"X", "Y", "C", "SC"}) -# DataType by memory type -MEMORY_TYPE_TO_DATA_TYPE: dict[str, int] = { - "X": DataType.BIT, - "Y": DataType.BIT, - "C": DataType.BIT, - "T": DataType.BIT, - "CT": DataType.BIT, - "SC": DataType.BIT, - "DS": DataType.INT, - "SD": DataType.INT, - "TD": DataType.INT, - "DD": DataType.INT2, - "CTD": DataType.INT2, - "DF": DataType.FLOAT, - "DH": DataType.HEX, - "XD": DataType.HEX, - "YD": DataType.HEX, - "TXT": DataType.TXT, -} -# ============================================================================== -# Validation Rules & Constraints -# ============================================================================== - -# Validation constants -NICKNAME_MAX_LENGTH = 24 -COMMENT_MAX_LENGTH = 128 -# Characters forbidden in nicknames -# Note: Space is allowed, hyphen (-) and period (.) 
are forbidden -FORBIDDEN_CHARS = set("%\"<>!#$&'()*+-./:;=?@[\\]^`{|}~") - -RESERVED_NICKNAMES = { - "log", "sum", "sin", "asin", "rad", "cos", "acos", "sqrt", "deg", - "tan", "atan", "ln", "pi", "mod", "and", "or", "xor", "lsh", "rsh", - "lro", "rro", "log" -} - -# Value ranges for validation -INT_MIN = -32768 -INT_MAX = 32767 -INT2_MIN = -2147483648 -INT2_MAX = 2147483647 -FLOAT_MIN = -3.4028235e38 -FLOAT_MAX = 3.4028235e38 -# Memory types where InitialValue/Retentive cannot be edited -# System types: values are fixed by CLICK software -NON_EDITABLE_TYPES: frozenset[str] = frozenset({"SC", "SD", "XD", "YD"}) -# ============================================================================== -# Interleaved Type Pairs (T↔TD, CT↔CTD) -# ============================================================================== -# These memory types are "paired" and interleaved in unified view: -# - T (Timer bit) pairs with TD (Timer data/value) -# - CT (Counter bit) pairs with CTD (Counter data/value) - -# Canonical set of interleaved type pairs -INTERLEAVED_TYPE_PAIRS: frozenset[frozenset[str]] = frozenset( - { - frozenset({"T", "TD"}), - frozenset({"CT", "CTD"}), - } -) - -# Bidirectional lookup for interleaved pairs -INTERLEAVED_PAIRS: dict[str, str] = { - "T": "TD", - "TD": "T", - "CT": "CTD", - "CTD": "CT", -} - -# Memory types that share retentive with their paired type (TD↔T, CTD↔CT) -# Retentive edits on these types update the paired type instead -# Note: One-directional (TD->T, CTD->CT) because retentive is stored on T/CT -PAIRED_RETENTIVE_TYPES: dict[str, str] = {"TD": "T", "CTD": "CT"} -# Default retentive values by memory type (from CLICK documentation) -DEFAULT_RETENTIVE: dict[str, bool] = { - "X": False, - "Y": False, - "C": False, - "T": False, - "CT": True, # Counters are retentive by default - "SC": False, # Can't change - "DS": True, # Data registers are retentive by default - "DD": True, - "DH": True, - "DF": True, - "XD": False, # Can't change - "YD": False, # Can't change - "TD": False, # See note - "CTD": True, # See note - "SD": False, # Can't change - "TXT": True, -} diff --git a/src/clicknick/models/dataview_row.py b/src/clicknick/models/dataview_row.py index fd3c177..36be2a3 100644 --- a/src/clicknick/models/dataview_row.py +++ b/src/clicknick/models/dataview_row.py @@ -1,396 +1,12 @@ -"""Data model for the Dataview Editor. - -Defines the DataviewRow dataclass and type code mappings for CLICK PLC DataView files. 
-""" +"""DataView model re-exports from pyclickplc.dataview.""" from __future__ import annotations -import re -from dataclasses import dataclass, field - - -# Type codes used in CDV files to identify address types -class TypeCode: - """Type codes for CDV file format.""" - - BIT = 768 - INT = 0 - INT2 = 256 - HEX = 3 - FLOAT = 257 - TXT = 1024 - - -# Map memory type prefixes to their type codes -MEMORY_TYPE_TO_CODE: dict[str, int] = { - "X": TypeCode.BIT, - "Y": TypeCode.BIT, - "C": TypeCode.BIT, - "T": TypeCode.BIT, - "CT": TypeCode.BIT, - "SC": TypeCode.BIT, - "DS": TypeCode.INT, - "TD": TypeCode.INT, - "SD": TypeCode.INT, - "DD": TypeCode.INT2, - "CTD": TypeCode.INT2, - "DH": TypeCode.HEX, - "XD": TypeCode.HEX, - "YD": TypeCode.HEX, - "DF": TypeCode.FLOAT, - "TXT": TypeCode.TXT, -} - -# Reverse mapping: type code to list of memory types -CODE_TO_MEMORY_TYPES: dict[int, list[str]] = { - TypeCode.BIT: ["X", "Y", "C", "T", "CT", "SC"], - TypeCode.INT: ["DS", "TD", "SD"], - TypeCode.INT2: ["DD", "CTD"], - TypeCode.HEX: ["DH", "XD", "YD"], - TypeCode.FLOAT: ["DF"], - TypeCode.TXT: ["TXT"], -} - -# SC addresses that are writable (most SC are read-only system controls) -WRITABLE_SC: frozenset[int] = frozenset({50, 51, 53, 55, 60, 61, 65, 66, 67, 75, 76, 120, 121}) - -# SD addresses that are writable (most SD are read-only system data) -WRITABLE_SD: frozenset[int] = frozenset( - { - 29, - 31, - 32, - 34, - 35, - 36, - 40, - 41, - 42, - 50, - 51, - 60, - 61, - 106, - 107, - 108, - 112, - 113, - 114, - 140, - 141, - 142, - 143, - 144, - 145, - 146, - 147, - 214, - 215, - } -) - -# Max rows in a dataview -MAX_DATAVIEW_ROWS = 100 - -# Regex to parse address strings like "X001", "DS1", "CTD250" -ADDRESS_PATTERN = re.compile(r"^([A-Z]+)(\d+[uU]?)$", re.IGNORECASE) - - -def parse_address(address: str) -> tuple[str, str] | None: - """Parse an address string into memory type and number. - - Args: - address: Address string like "X001", "DS1", "XD0u" - - Returns: - Tuple of (memory_type, number_part) or None if invalid. - Number part includes 'u' suffix for upper byte if present. - """ - if not address: - return None - match = ADDRESS_PATTERN.match(address.strip()) - if not match: - return None - return (match.group(1).upper(), match.group(2)) - - -def get_type_code_for_address(address: str) -> int | None: - """Get the type code for an address. - - Args: - address: Address string like "X001", "DS1" - - Returns: - Type code or None if address is invalid. - """ - parsed = parse_address(address) - if not parsed: - return None - memory_type, _ = parsed - return MEMORY_TYPE_TO_CODE.get(memory_type) - - -def is_address_writable(address: str) -> bool: - """Check if an address is writable (can have a New Value set). - - Most addresses are writable, but SC and SD have specific writable addresses. - XD and YD are read-only. - - Args: - address: Address string like "X001", "SC50" - - Returns: - True if the address can have a New Value written to it. 
- """ - parsed = parse_address(address) - if not parsed: - return False - - memory_type, number_part = parsed - - # XD and YD are read-only - if memory_type in ("XD", "YD"): - return False - - # SC has specific writable addresses - if memory_type == "SC": - try: - addr_num = int(number_part) - return addr_num in WRITABLE_SC - except ValueError: - return False - - # SD has specific writable addresses - if memory_type == "SD": - try: - addr_num = int(number_part) - return addr_num in WRITABLE_SD - except ValueError: - return False - - # All other addresses are writable - return True - - -@dataclass -class DataviewRow: - """Represents a single row in a CLICK DataView. - - A dataview row contains an address to monitor and optionally a new value - to write to that address. The nickname and comment are display-only - fields populated from SharedAddressData. - """ - - # Core data (stored in CDV file) - address: str = "" # e.g., "X001", "DS1", "CTD250" - type_code: int = 0 # Type code for the address - new_value: str = "" # Optional new value to write - - # Display-only fields (populated from SharedAddressData) - nickname: str = field(default="", compare=False) - comment: str = field(default="", compare=False) - - @property - def is_empty(self) -> bool: - """Check if this row is empty (no address set).""" - return not self.address.strip() - - @property - def is_writable(self) -> bool: - """Check if this address can have a New Value written to it.""" - return is_address_writable(self.address) - - @property - def memory_type(self) -> str | None: - """Get the memory type prefix (X, Y, DS, etc.) or None if invalid.""" - parsed = parse_address(self.address) - return parsed[0] if parsed else None - - @property - def address_number(self) -> str | None: - """Get the address number part or None if invalid.""" - parsed = parse_address(self.address) - return parsed[1] if parsed else None - - def update_type_code(self) -> bool: - """Update the type code based on the current address. - - Returns: - True if type code was updated, False if address is invalid. - """ - code = get_type_code_for_address(self.address) - if code is not None: - self.type_code = code - return True - return False - - def clear(self) -> None: - """Clear all fields in this row.""" - self.address = "" - self.type_code = 0 - self.new_value = "" - self.nickname = "" - self.comment = "" - - -def create_empty_dataview(count: int = MAX_DATAVIEW_ROWS) -> list[DataviewRow]: - """Create a new empty dataview with the specified number of rows. - - Args: - count: Number of rows to create (default MAX_DATAVIEW_ROWS). - - Returns: - List of empty DataviewRow objects. - """ - return [DataviewRow() for _ in range(count)] - - -# --- New Value Conversion Functions --- -# CDV files store values in specific formats that need conversion for display - - -def storage_to_display(value: str, type_code: int) -> str: - """Convert a stored CDV value to display format. - - Args: - value: The raw value from the CDV file - type_code: The type code (TypeCode.BIT, TypeCode.INT, etc.) 
- - Returns: - Human-readable display value - """ - if not value: - return "" - - try: - if type_code == TypeCode.BIT: - # BIT: 0 or 1 - return "1" if value == "1" else "0" - - elif type_code == TypeCode.INT: - # INT (16-bit signed): Stored as unsigned 32-bit with sign extension - # Convert back to signed 16-bit - unsigned_val = int(value) - # Mask to 16 bits and convert to signed - val_16bit = unsigned_val & 0xFFFF - if val_16bit >= 0x8000: - val_16bit -= 0x10000 - return str(val_16bit) - - elif type_code == TypeCode.INT2: - # INT2 (32-bit signed): Stored as unsigned 32-bit - # Convert back to signed 32-bit - unsigned_val = int(value) - if unsigned_val >= 0x80000000: - unsigned_val -= 0x100000000 - return str(unsigned_val) - - elif type_code == TypeCode.HEX: - # HEX: Stored as decimal, display as 4-digit hex with leading zeros - decimal_val = int(value) - return format(decimal_val, "04X") - - elif type_code == TypeCode.FLOAT: - # FLOAT: Stored as IEEE 754 32-bit integer representation - import struct - - int_val = int(value) - # Convert integer to bytes (unsigned 32-bit) - bytes_val = struct.pack(">I", int_val & 0xFFFFFFFF) - # Interpret as big-endian float - float_val = struct.unpack(">f", bytes_val)[0] - # Format nicely (avoid excessive decimals) - if float_val == int(float_val): - return str(int(float_val)) - return f"{float_val:.6g}" - - elif type_code == TypeCode.TXT: - # TXT: Stored as ASCII code, display as character - ascii_code = int(value) - if 32 <= ascii_code <= 126: # Printable ASCII - return chr(ascii_code) - return str(ascii_code) # Non-printable: show as number - - else: - return value - - except (ValueError, struct.error): - return value - - -def display_to_storage(value: str, type_code: int) -> str: - """Convert a display value to CDV storage format. - - Args: - value: The human-readable display value - type_code: The type code (TypeCode.BIT, TypeCode.INT, etc.) 
- - Returns: - Value formatted for CDV file storage - """ - if not value: - return "" - - try: - if type_code == TypeCode.BIT: - # BIT: 0 or 1 - return "1" if value in ("1", "True", "true", "ON", "on") else "0" - - elif type_code == TypeCode.INT: - # INT (16-bit signed): Convert to unsigned 32-bit with sign extension - signed_val = int(value) - # Clamp to 16-bit signed range - signed_val = max(-32768, min(32767, signed_val)) - # Convert to unsigned 32-bit representation - if signed_val < 0: - unsigned_val = signed_val + 0x100000000 - else: - unsigned_val = signed_val - return str(unsigned_val) - - elif type_code == TypeCode.INT2: - # INT2 (32-bit signed): Convert to unsigned 32-bit - signed_val = int(value) - # Clamp to 32-bit signed range - signed_val = max(-2147483648, min(2147483647, signed_val)) - if signed_val < 0: - unsigned_val = signed_val + 0x100000000 - else: - unsigned_val = signed_val - return str(unsigned_val) - - elif type_code == TypeCode.HEX: - # HEX: Convert hex string to decimal - # Support with or without 0x prefix - hex_val = value.strip() - if hex_val.lower().startswith("0x"): - hex_val = hex_val[2:] - decimal_val = int(hex_val, 16) - return str(decimal_val) - - elif type_code == TypeCode.FLOAT: - # FLOAT: Convert to IEEE 754 32-bit integer representation - import struct - - float_val = float(value) - # Convert float to bytes - bytes_val = struct.pack(">f", float_val) - # Interpret as unsigned 32-bit integer - int_val = struct.unpack(">I", bytes_val)[0] - return str(int_val) - - elif type_code == TypeCode.TXT: - # TXT: Convert character to ASCII code - if len(value) == 1: - return str(ord(value)) - # If it's already a number, keep it - return str(int(value)) - - else: - return value - - except (ValueError, struct.error): - return value - - -# Import struct at module level for efficiency +from pyclickplc.banks import DataType as DataType +from pyclickplc.dataview import MAX_DATAVIEW_ROWS as MAX_DATAVIEW_ROWS +from pyclickplc.dataview import WRITABLE_SC as WRITABLE_SC +from pyclickplc.dataview import WRITABLE_SD as WRITABLE_SD +from pyclickplc.dataview import DataViewRecord as DataViewRecord +from pyclickplc.dataview import create_empty_dataview as create_empty_dataview +from pyclickplc.dataview import get_data_type_for_address as get_data_type_for_address +from pyclickplc.dataview import is_address_writable as is_address_writable diff --git a/src/clicknick/models/nickname.py b/src/clicknick/models/nickname.py index 956db92..236ef25 100644 --- a/src/clicknick/models/nickname.py +++ b/src/clicknick/models/nickname.py @@ -1,6 +1,6 @@ from dataclasses import dataclass -from .constants import DEFAULT_RETENTIVE +from pyclickplc.banks import DEFAULT_RETENTIVE @dataclass diff --git a/src/clicknick/models/validation.py b/src/clicknick/models/validation.py index 98e578b..d4b4d02 100644 --- a/src/clicknick/models/validation.py +++ b/src/clicknick/models/validation.py @@ -1,55 +1,39 @@ from collections.abc import Callable -from .constants import ( - COMMENT_MAX_LENGTH, - FLOAT_MAX, - FLOAT_MIN, - FORBIDDEN_CHARS, - INT2_MAX, - INT2_MIN, - INT_MAX, - INT_MIN, - NICKNAME_MAX_LENGTH, - DataType, - RESERVED_NICKNAMES, -) - - -def validate_nickname_format(nickname: str) -> tuple[bool, str]: +from pyclickplc.validation import COMMENT_MAX_LENGTH +from pyclickplc.validation import validate_initial_value as validate_initial_value +from pyclickplc.validation import validate_nickname as _pyclickplc_validate_nickname + + +def validate_nickname_format( + nickname: str, *, system_bank: str | 
None = None +) -> tuple[bool, str]: """Validate nickname format (length, characters, etc.) without uniqueness check. Args: nickname: The nickname to validate + system_bank: Optional system bank hint (e.g. "SC", "SD", "X") for + pyclickplc's bank-specific system rules. Returns: Tuple of (is_valid, error_message) - error_message is "" if valid """ - if nickname == "": - return True, "" # Empty is valid (just means unassigned) - - if len(nickname) > NICKNAME_MAX_LENGTH: - return False, f"Too long ({len(nickname)}/24)" - - if nickname.startswith("_"): - return False, "Cannot start with _" - - if nickname.lower() in RESERVED_NICKNAMES: - return False, "Reserved keyword" + return _pyclickplc_validate_nickname(nickname, system_bank=system_bank) - invalid_chars = set(nickname) & FORBIDDEN_CHARS - if invalid_chars: - # Show first few invalid chars - chars_display = "".join(sorted(invalid_chars)[:3]) - return False, f"Invalid: {chars_display}" - return True, "" - - -def validate_comment(comment: str) -> tuple[bool, str]: - """Validate comment length. +def validate_comment( + comment: str, + is_block_name_duplicate: Callable[[str, int], bool] | None = None, + current_addr_key: int = 0, +) -> tuple[bool, str]: + """Validate comment length and block name uniqueness. Args: comment: The comment to validate + is_block_name_duplicate: Optional callback to check if a block name is already + in use. Signature: (block_name, exclude_addr_key) -> bool + current_addr_key: The addr_key of the row being validated (excluded from + duplicate check) Returns: Tuple of (is_valid, error_message) - error_message is "" if valid @@ -60,6 +44,15 @@ def validate_comment(comment: str) -> tuple[bool, str]: if len(comment) > COMMENT_MAX_LENGTH: return False, f"Too long ({len(comment)}/128)" + # Check for duplicate block name if checker provided + if is_block_name_duplicate is not None: + # Import here to avoid circular dependency + from pyclickplc.blocks import parse_block_tag + + tag = parse_block_tag(comment) + if tag.name and is_block_name_duplicate(tag.name, current_addr_key): + return False, f"Duplicate block: {tag.name}" + return True, "" @@ -68,6 +61,8 @@ def validate_nickname( all_nicknames: dict[int, str], current_addr_key: int, is_duplicate_fn: Callable[[str, int], bool] | None = None, + *, + system_bank: str | None = None, ) -> tuple[bool, str]: """Validate a nickname against all rules. @@ -77,12 +72,14 @@ def validate_nickname( current_addr_key: The addr_key of the row being validated (excluded from uniqueness) is_duplicate_fn: Optional O(1) duplicate checker function(nickname, exclude_addr_key) -> bool. If provided, uses this instead of O(n) scan of all_nicknames. + system_bank: Optional system bank hint (e.g. "SC", "SD", "X") for + pyclickplc's bank-specific system rules. Returns: Tuple of (is_valid, error_message) - error_message is "" if valid """ # Check format first - is_valid, error = validate_nickname_format(nickname) + is_valid, error = validate_nickname_format(nickname, system_bank=system_bank) if not is_valid: return is_valid, error @@ -102,76 +99,3 @@ def validate_nickname( return False, "Duplicate" return True, "" - - -def validate_initial_value( - initial_value: str, - data_type: int, -) -> tuple[bool, str]: - """Validate an initial value against the data type rules. 
- - Args: - initial_value: The initial value string to validate - data_type: The DataType number (0=bit, 1=int, 2=int2, 3=float, 4=hex, 6=txt) - - Returns: - Tuple of (is_valid, error_message) - error_message is "" if valid - """ - if initial_value == "": - return True, "" # Empty is valid (means no initial value set) - - if data_type == DataType.BIT: - if initial_value not in ("0", "1"): - return False, "Must be 0 or 1" - return True, "" - - elif data_type == DataType.INT: - try: - val = int(initial_value) - if val < INT_MIN or val > INT_MAX: - return False, f"Range: {INT_MIN} to {INT_MAX}" - return True, "" - except ValueError: - return False, "Must be integer" - - elif data_type == DataType.INT2: - try: - val = int(initial_value) - if val < INT2_MIN or val > INT2_MAX: - return False, f"Range: {INT2_MIN} to {INT2_MAX}" - return True, "" - except ValueError: - return False, "Must be integer" - - elif data_type == DataType.FLOAT: - try: - val = float(initial_value) - # Allow scientific notation like -3.4028235E+38 - if val < FLOAT_MIN or val > FLOAT_MAX: - return False, "Out of float range" - return True, "" - except ValueError: - return False, "Must be number" - - elif data_type == DataType.HEX: - # Hex values should be 4 hex digits (0000 to FFFF) - if len(initial_value) > 4: - return False, "Max 4 hex digits" - try: - val = int(initial_value, 16) - if val < 0 or val > 0xFFFF: - return False, "Range: 0000 to FFFF" - return True, "" - except ValueError: - return False, "Must be hex (0-9, A-F)" - - elif data_type == DataType.TXT: - # Single ASCII character (7-bit) - if len(initial_value) != 1: - return False, "Must be single char" - if ord(initial_value) > 127: - return False, "Must be ASCII" - return True, "" - - # Unknown data type - return True, "" diff --git a/src/clicknick/services/block_service.py b/src/clicknick/services/block_service.py index 53f515f..47c7b72 100644 --- a/src/clicknick/services/block_service.py +++ b/src/clicknick/services/block_service.py @@ -1,8 +1,7 @@ """Block service for coordinating block tag operations and colors. -Contains multi-row block operations extracted from models/blocktag.py. -Single-comment parsing remains in blocktag.py; this service handles -operations that span multiple rows. +This service provides editor-level coordination for block tags. +Core block tag model operations (parsing, range computation) are in models/blocktag.py. """ from __future__ import annotations @@ -10,214 +9,86 @@ from dataclasses import replace from typing import TYPE_CHECKING -from ..models.blocktag import ( +from pyclickplc.blocks import ( BlockRange, HasComment, + compute_all_block_ranges, + find_block_range_indices, + find_paired_tag_index, format_block_tag, + get_all_block_names, + is_block_name_available, parse_block_tag, strip_block_tag, + validate_block_span, ) -from ..models.constants import INTERLEAVED_TYPE_PAIRS if TYPE_CHECKING: + from pyclickplc.blocks import BlockTag + from ..data.address_store import AddressStore from ..models.address_row import AddressRow - from ..models.blocktag import BlockTag - - -# ============================================================================= -# Multi-Row Block Operations (extracted from blocktag.py) -# ============================================================================= - - -def find_paired_tag_index( - rows: list[HasComment], row_idx: int, tag: BlockTag | None = None -) -> int | None: - """Find the row index of the paired open/close block tag. 
- - Uses nesting depth to correctly match tags when there are multiple - blocks with the same name (nested or separate sections). - - Only matches tags within the same memory_type (if available) to correctly - handle interleaved views like T/TD where each type has its own tags. - - Args: - rows: List of objects with .comment and optional .memory_type attributes - row_idx: Index of the row containing the tag - tag: Parsed BlockTag, or None to parse from rows[row_idx].comment - - Returns: - Row index of the paired tag, or None if not found - """ - if tag is None: - tag = parse_block_tag(rows[row_idx].comment) - - if not tag.name or tag.tag_type == "self-closing": - return None - - # Get memory type of source row (if available) for filtering - source_type = getattr(rows[row_idx], "memory_type", None) - - if tag.tag_type == "open": - # Search forward for matching close tag, respecting nesting - depth = 1 - for i in range(row_idx + 1, len(rows)): - # Skip rows with different memory type - if source_type and getattr(rows[i], "memory_type", None) != source_type: - continue - other_tag = parse_block_tag(rows[i].comment) - if other_tag.name == tag.name: - if other_tag.tag_type == "open": - depth += 1 - elif other_tag.tag_type == "close": - depth -= 1 - if depth == 0: - return i - elif tag.tag_type == "close": - # Search backward for matching open tag, respecting nesting - depth = 1 - for i in range(row_idx - 1, -1, -1): - # Skip rows with different memory type - if source_type and getattr(rows[i], "memory_type", None) != source_type: - continue - other_tag = parse_block_tag(rows[i].comment) - if other_tag.name == tag.name: - if other_tag.tag_type == "close": - depth += 1 - elif other_tag.tag_type == "open": - depth -= 1 - if depth == 0: - return i - return None - - -def find_block_range_indices( - rows: list[HasComment], row_idx: int, tag: BlockTag | None = None -) -> tuple[int, int] | None: - """Find the (start_idx, end_idx) range for a block tag. - - Uses nesting depth to correctly match tags when there are multiple - blocks with the same name. - - Args: - rows: List of objects with a .comment attribute - row_idx: Index of the row containing the tag - tag: Parsed BlockTag, or None to parse from rows[row_idx].comment - - Returns: - Tuple of (start_idx, end_idx) inclusive, or None if tag is invalid - """ - if tag is None: - tag = parse_block_tag(rows[row_idx].comment) - - if not tag.name or not tag.tag_type: - return None - - if tag.tag_type == "self-closing": - return (row_idx, row_idx) - - if tag.tag_type == "open": - paired_idx = find_paired_tag_index(rows, row_idx, tag) - if paired_idx is not None: - return (row_idx, paired_idx) - # No close found - just the opening row - return (row_idx, row_idx) - - if tag.tag_type == "close": - paired_idx = find_paired_tag_index(rows, row_idx, tag) - if paired_idx is not None: - return (paired_idx, row_idx) - # No open found - just the closing row - return (row_idx, row_idx) - - return None - - -def compute_all_block_ranges(rows: list[HasComment]) -> list[BlockRange]: - """Compute all block ranges from a list of rows using stack-based matching. - - Correctly handles nested blocks and multiple blocks with the same name. - Only matches open/close tags within the same memory_type to handle - interleaved views like T/TD correctly. 
- - Args: - rows: List of objects with .comment and optional .memory_type attributes - - Returns: - List of BlockRange objects, sorted by start_idx - """ - ranges: list[BlockRange] = [] - - # Stack for tracking open tags: (memory_type, name) -> [(start_idx, bg_color), ...] - # Using (memory_type, name) as key ensures T's and TD's are separate - open_tags: dict[tuple[str | None, str], list[tuple[int, str | None]]] = {} - - for row_idx, row in enumerate(rows): - tag = parse_block_tag(row.comment) - if not tag.name: - continue - - memory_type = getattr(row, "memory_type", None) - stack_key = (memory_type, tag.name) - if tag.tag_type == "self-closing": - ranges.append(BlockRange(row_idx, row_idx, tag.name, tag.bg_color, memory_type)) - elif tag.tag_type == "open": - if stack_key not in open_tags: - open_tags[stack_key] = [] - open_tags[stack_key].append((row_idx, tag.bg_color)) - elif tag.tag_type == "close": - if stack_key in open_tags and open_tags[stack_key]: - start_idx, bg_color = open_tags[stack_key].pop() - ranges.append(BlockRange(start_idx, row_idx, tag.name, bg_color, memory_type)) - # Handle unclosed tags as singular points - for (mem_type, name), stack in open_tags.items(): - for start_idx, bg_color in stack: - ranges.append(BlockRange(start_idx, start_idx, name, bg_color, mem_type)) +# Re-export functions from blocktag for backwards compatibility +__all__ = [ + "BlockRange", + "BlockService", + "HasComment", + "compute_all_block_ranges", + "find_block_range_indices", + "find_paired_tag_index", + "get_all_block_names", + "is_block_name_available", + "validate_block_span", +] - # Sort by start index - ranges.sort(key=lambda r: r.start_idx) - return ranges +def _transform_block_name_for_pair(name: str, source_type: str, target_type: str) -> str: + """Transform block name for interleaved pair sync. -def validate_block_span(rows: list[AddressRow]) -> tuple[bool, str | None]: - """Validate that a block span doesn't cross memory type boundaries. - - Blocks should only contain addresses of the same memory type, - with the exception of paired types (T+TD, CT+CTD) which are - interleaved and can share blocks. + Convention: + - T/CT (base types) use plain names: "Pumps" + - TD/CTD (data types) use "_D" suffix: "Pumps_D" Args: - rows: List of AddressRow objects that would be in the block + name: Original block name + source_type: Memory type of source ("T", "TD", "CT", "CTD") + target_type: Memory type of target Returns: - Tuple of (is_valid, error_message). 
- - (True, None) if all rows have compatible memory types - - (False, error_message) if rows span incompatible memory types + Transformed block name for the target type """ - if not rows: - return True, None + # Determine if source and target are base or data types + base_types = {"T", "CT"} + data_types = {"TD", "CTD"} - # Get unique memory types in the selection - memory_types = {row.memory_type for row in rows} + source_is_base = source_type in base_types + target_is_base = target_type in base_types + source_is_data = source_type in data_types + target_is_data = target_type in data_types - if len(memory_types) == 1: - return True, None + if source_is_base and target_is_data: + # T -> TD or CT -> CTD: add _D suffix if not already present + if not name.endswith("_D"): + return name + "_D" + return name - # Check if it's a valid paired type combination - if frozenset(memory_types) in INTERLEAVED_TYPE_PAIRS: - return True, None + if source_is_data and target_is_base: + # TD -> T or CTD -> CT: remove _D suffix if present + if name.endswith("_D"): + return name[:-2] + return name - types_str = ", ".join(sorted(memory_types)) - return False, f"Blocks cannot span multiple memory types ({types_str})" + # Same type category (shouldn't happen in normal use) + return name class BlockService: """Coordinates block tag operations and precomputes block colors. - Wraps existing block tag parsing logic from models/blocktag.py. - All methods are static as the service is stateless. + Provides editor-level coordination for block tags. Core parsing and + range computation is delegated to models/blocktag.py. """ @staticmethod @@ -365,27 +236,56 @@ def apply_block_tag(source_comment: str, target_comment: str) -> str | None: return new_tag @staticmethod - def compute_block_colors_map(rows: list[AddressRow]) -> dict[int, str]: - """Compute block color map for a list of rows. + def apply_block_tag_for_interleaved_pair( + source_comment: str, + target_comment: str, + source_type: str, + target_type: str, + ) -> str | None: + """Apply block tag from source to target with _D suffix transformation. + + For T/TD and CT/CTD pairs, block names use a naming convention: + - T/CT blocks use base name (e.g., "Pumps") + - TD/CTD blocks use "_D" suffix (e.g., "Pumps_D") - This is a helper method for UI components that need to display - block colors without modifying AddressRow objects. + This ensures unique block names across all memory types while + maintaining the logical pairing between timer/counter pairs. 
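The naming convention is easiest to see with concrete names. A sketch exercising the private helper defined just above (block names are illustrative):

```python
from clicknick.services.block_service import _transform_block_name_for_pair

assert _transform_block_name_for_pair("Pumps", "T", "TD") == "Pumps_D"
assert _transform_block_name_for_pair("Pumps_D", "TD", "T") == "Pumps"
assert _transform_block_name_for_pair("Mixers", "CT", "CTD") == "Mixers_D"
# Names that already carry the suffix are left alone when copying toward the data type.
assert _transform_block_name_for_pair("Pumps_D", "T", "TD") == "Pumps_D"
```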
Args: - rows: List of AddressRow objects + source_comment: The comment to apply FROM + target_comment: The comment to apply TO + source_type: Memory type of source (e.g., "T", "TD") + target_type: Memory type of target (e.g., "TD", "T") Returns: - Dict mapping row index (in rows list) to bg color string + The updated target comment, or None if no change needed """ - ranges = compute_all_block_ranges(rows) + source_tag = parse_block_tag(source_comment) + target_tag = parse_block_tag(target_comment) - # Filter to only blocks with colors - colored_ranges = [r for r in ranges if r.bg_color] + # Get target's non-block text (preserve it) + target_remaining = target_tag.remaining_text.strip() - # Build row_idx -> color map, with inner blocks overriding outer - color_map: dict[int, str] = {} - for r in colored_ranges: - for row_idx in range(r.start_idx, r.end_idx + 1): - color_map[row_idx] = r.bg_color + if source_tag.name is None: + # Source has no block tag - remove from target + if target_tag.name is None: + return None # No change needed + return target_remaining if target_remaining else "" + + # Transform the block name based on direction + transformed_name = _transform_block_name_for_pair(source_tag.name, source_type, target_type) - return color_map + # Check if block tags are already the same after transformation + if ( + transformed_name == target_tag.name + and source_tag.tag_type == target_tag.tag_type + and source_tag.bg_color == target_tag.bg_color + ): + return None # No change needed + + # Build new tag with transformed name + new_tag = format_block_tag(transformed_name, source_tag.tag_type, source_tag.bg_color) + if target_remaining: + return f"{target_remaining} {new_tag}" + else: + return new_tag diff --git a/src/clicknick/services/import_service.py b/src/clicknick/services/import_service.py index 4faff97..46b143c 100644 --- a/src/clicknick/services/import_service.py +++ b/src/clicknick/services/import_service.py @@ -8,8 +8,8 @@ from typing import TYPE_CHECKING -from ..models.address_row import get_addr_key -from ..models.blocktag import parse_block_tag, strip_block_tag +from pyclickplc.addresses import get_addr_key +from pyclickplc.blocks import parse_block_tag, strip_block_tag if TYPE_CHECKING: from ..data.shared_data import SharedAddressData diff --git a/src/clicknick/utils/mdb_operations.py b/src/clicknick/utils/mdb_operations.py index 5a572dd..4598da6 100644 --- a/src/clicknick/utils/mdb_operations.py +++ b/src/clicknick/utils/mdb_operations.py @@ -8,9 +8,9 @@ from typing import TYPE_CHECKING import pyodbc +from pyclickplc.banks import DEFAULT_RETENTIVE, MEMORY_TYPE_TO_DATA_TYPE, DataType from ..models.address_row import AddressRow -from ..models.constants import DEFAULT_RETENTIVE, MEMORY_TYPE_TO_DATA_TYPE, DataType from .mdb_shared import create_access_connection, find_click_database if TYPE_CHECKING: diff --git a/src/clicknick/utils/verification.py b/src/clicknick/utils/verification.py index 982b2b9..d0606b0 100644 --- a/src/clicknick/utils/verification.py +++ b/src/clicknick/utils/verification.py @@ -9,61 +9,37 @@ from pathlib import Path from typing import TYPE_CHECKING -from ..models.address_row import get_addr_key, is_xd_yd_hidden_slot, parse_addr_key -from ..models.constants import ( - ADDRESS_RANGES, - FLOAT_MAX, - FLOAT_MIN, - INT2_MAX, - INT2_MIN, - INT_MAX, - INT_MIN, - NON_EDITABLE_TYPES, - PAIRED_RETENTIVE_TYPES, -) -from ..models.dataview_row import ( - MEMORY_TYPE_TO_CODE, - TypeCode, - get_type_code_for_address, - is_address_writable, - parse_address, - 
storage_to_display, -) +from pyclickplc.addresses import get_addr_key, is_xd_yd_hidden_slot, parse_addr_key +from pyclickplc.banks import BANKS, NON_EDITABLE_TYPES, PAIRED_RETENTIVE_TYPES +from pyclickplc.validation import SYSTEM_NICKNAME_TYPES + from ..models.validation import validate_initial_value, validate_nickname +from ..views.dataview_editor.cdv_file import check_cdv_files if TYPE_CHECKING: from ..data.shared_data import SharedAddressData from ..models.address_row import AddressRow -# Memory types where underscore-prefixed nicknames are expected (system-defined) -SYSTEM_NICKNAME_TYPES: frozenset[str] = frozenset({"SC", "SD", "X"}) - - @dataclass class VerificationResult: """Results from verification process.""" mdb_issues: list[str] = field(default_factory=list) cdv_issues: list[str] = field(default_factory=list) - # Separate list for system nickname issues (underscore prefix in SC/SD/X) - system_nickname_issues: list[str] = field(default_factory=list) total_addresses: int = 0 cdv_files_checked: int = 0 @property def total_issues(self) -> int: - """Total number of actionable issues (excludes system nickname issues).""" return len(self.mdb_issues) + len(self.cdv_issues) @property def all_issues(self) -> list[str]: - """All actionable issues combined.""" return self.mdb_issues + self.cdv_issues @property def passed(self) -> bool: - """True if no actionable issues found.""" return self.total_issues == 0 @@ -86,11 +62,11 @@ def _check_retentive_pairs(all_rows: dict[int, AddressRow]) -> list[str]: for data_type, paired_type in PAIRED_RETENTIVE_TYPES.items(): # Get address range for the data type - addr_range = ADDRESS_RANGES.get(data_type) - if not addr_range: + bank = BANKS.get(data_type) + if not bank: continue - min_addr, max_addr = addr_range + min_addr, max_addr = bank.min_addr, bank.max_addr for addr_num in range(min_addr, max_addr + 1): # Get the data row (TD or CTD) data_key = get_addr_key(data_type, addr_num) @@ -110,13 +86,13 @@ def _check_retentive_pairs(all_rows: dict[int, AddressRow]) -> list[str]: return issues -def verify_mdb_addresses(shared_data: SharedAddressData) -> tuple[list[str], list[str]]: +def verify_mdb_addresses(shared_data: SharedAddressData) -> list[str]: """Verify all MDB addresses for validity. Checks: - addr_key can be parsed back correctly - Address is within valid range for memory type - - Nickname and initial value pass validation + - Nickname and initial value pass validation (system types allow leading _) - NON_EDITABLE_TYPES have empty or "0" initial_value - XD/YD hidden slots don't have nicknames - T/TD and CT/CTD pairs have matching retentive settings @@ -125,11 +101,9 @@ def verify_mdb_addresses(shared_data: SharedAddressData) -> tuple[list[str], lis shared_data: The SharedAddressData containing all addresses Returns: - Tuple of (issues, system_nickname_issues) where system_nickname_issues - contains "Cannot start with _" errors for SC/SD/X types. + List of issue strings. """ issues: list[str] = [] - system_nickname_issues: list[str] = [] all_rows = shared_data.all_rows # Build nickname dict for duplicate checking @@ -151,31 +125,32 @@ def verify_mdb_addresses(shared_data: SharedAddressData) -> tuple[list[str], lis continue # 2. 
Check address is within valid range - addr_range = ADDRESS_RANGES.get(row.memory_type) - if addr_range: - min_addr, max_addr = addr_range + bank = BANKS.get(row.memory_type) + if bank: + min_addr, max_addr = bank.min_addr, bank.max_addr if not (min_addr <= row.address <= max_addr): issues.append(f"MDB: {display} out of range (valid: {min_addr}-{max_addr})") else: issues.append(f"MDB: Unknown memory type '{row.memory_type}' for address {row.address}") # 3. Validate nickname and initial value - nick_valid, nick_error = validate_nickname(row.nickname, all_nicknames, addr_key) + is_system_type = row.memory_type in SYSTEM_NICKNAME_TYPES + nick_valid, nick_error = validate_nickname( + row.nickname, + all_nicknames, + addr_key, + system_bank=row.memory_type if is_system_type else None, + ) if not nick_valid: - # Separate out "Cannot start with _" for system types (SC/SD/X) - if nick_error == "Cannot start with _" and row.memory_type in SYSTEM_NICKNAME_TYPES: - system_nickname_issues.append( - f"MDB: {display} nickname '{row.nickname}' starts with underscore" - ) - else: - issues.append(f"MDB: {display} nickname invalid: {nick_error}") + issues.append(f"MDB: {display} nickname invalid: {nick_error}") init_valid, init_error = validate_initial_value(row.initial_value, row.data_type) if not init_valid: issues.append(f"MDB: {display} initial value invalid: {init_error}") # 4. NON_EDITABLE_TYPES should have empty or "0" initial_value - if row.memory_type in NON_EDITABLE_TYPES: + # (system types like SC/SD can have PLC-preset initial values) + if row.memory_type in NON_EDITABLE_TYPES and not is_system_type: if row.initial_value not in ("", "0"): nick_part = f" nickname '{row.nickname}'" if row.nickname else "" issues.append( @@ -193,185 +168,6 @@ def verify_mdb_addresses(shared_data: SharedAddressData) -> tuple[list[str], lis # 6. Check T/TD and CT/CTD retentive consistency issues.extend(_check_retentive_pairs(all_rows)) - return issues, system_nickname_issues - - -def _validate_cdv_new_value( - new_value: str, - type_code: int, - address: str, - filename: str, - row_num: int, -) -> list[str]: - """Validate a CDV new_value against its type code. - - Validates both: - 1. Storage format is valid (can be stored in CDV file) - 2. 
Display value is within PLC's logical range - - Args: - new_value: The new value string from CDV (in storage format) - type_code: The type code for this address - address: The address string (for error messages) - filename: The CDV filename (for error messages) - row_num: The row number (for error messages) - - Returns: - List of issue strings - """ - issues: list[str] = [] - prefix = f"CDV {filename} row {row_num}: {address}" - - try: - if type_code == TypeCode.BIT: - if new_value not in ("0", "1"): - issues.append(f"{prefix} new_value '{new_value}' invalid for BIT (must be 0 or 1)") - - elif type_code == TypeCode.INT: - val = int(new_value) - # Check storage format is valid (unsigned 32-bit) - if val < 0 or val > 0xFFFFFFFF: - issues.append(f"{prefix} new_value '{new_value}' out of range for INT storage") - else: - # Convert to display and check logical range - display_val = storage_to_display(new_value, type_code) - try: - int_val = int(display_val) - if int_val < INT_MIN or int_val > INT_MAX: - issues.append( - f"{prefix} new_value converts to {int_val}, " - f"outside INT range ({INT_MIN} to {INT_MAX})" - ) - except ValueError: - issues.append(f"{prefix} new_value '{new_value}' failed to convert to INT") - - elif type_code == TypeCode.INT2: - val = int(new_value) - if val < 0 or val > 0xFFFFFFFF: - issues.append(f"{prefix} new_value '{new_value}' out of range for INT2 storage") - else: - # Convert to display and check logical range - display_val = storage_to_display(new_value, type_code) - try: - int_val = int(display_val) - if int_val < INT2_MIN or int_val > INT2_MAX: - issues.append( - f"{prefix} new_value converts to {int_val}, " - f"outside INT2 range ({INT2_MIN} to {INT2_MAX})" - ) - except ValueError: - issues.append(f"{prefix} new_value '{new_value}' failed to convert to INT2") - - elif type_code == TypeCode.HEX: - val = int(new_value) - if val < 0 or val > 0xFFFF: - issues.append(f"{prefix} new_value '{new_value}' out of range for HEX (0-65535)") - - elif type_code == TypeCode.FLOAT: - val = int(new_value) - if val < 0 or val > 0xFFFFFFFF: - issues.append(f"{prefix} new_value '{new_value}' invalid for FLOAT storage") - else: - # Convert to display and check logical range - display_val = storage_to_display(new_value, type_code) - try: - float_val = float(display_val) - if float_val < FLOAT_MIN or float_val > FLOAT_MAX: - issues.append( - f"{prefix} new_value converts to {float_val}, outside FLOAT range" - ) - except ValueError: - issues.append(f"{prefix} new_value '{new_value}' failed to convert to FLOAT") - - elif type_code == TypeCode.TXT: - val = int(new_value) - if val < 0 or val > 127: - issues.append( - f"{prefix} new_value '{new_value}' out of range for TXT (0-127 ASCII)" - ) - - except ValueError: - issues.append(f"{prefix} new_value '{new_value}' is not a valid number") - - return issues - - -def _verify_single_cdv(cdv_path: Path) -> list[str]: - """Verify a single CDV file. - - Args: - cdv_path: Path to the CDV file - - Returns: - List of issue strings - """ - from ..views.dataview_editor.cdv_file import load_cdv - - issues: list[str] = [] - filename = cdv_path.name - - try: - rows, has_new_values, _header = load_cdv(cdv_path) - except Exception as e: - issues.append(f"CDV {filename}: Error loading file - {e}") - return issues - - for i, row in enumerate(rows): - if row.is_empty: - continue - - row_num = i + 1 - - # 1. 
Check address format is valid - parsed = parse_address(row.address) - if not parsed: - issues.append(f"CDV {filename} row {row_num}: Invalid address format '{row.address}'") - continue - - mem_type, num_part = parsed - - # 2. Check memory type is known - if mem_type not in MEMORY_TYPE_TO_CODE: - issues.append(f"CDV {filename} row {row_num}: Unknown memory type '{mem_type}'") - continue - - # 3. Check address number is within range - addr_range = ADDRESS_RANGES.get(mem_type) - if addr_range: - try: - # Handle 'u' suffix for XD/YD upper bytes - addr_num = int(num_part.rstrip("uU")) - min_addr, max_addr = addr_range - if not (min_addr <= addr_num <= max_addr): - issues.append( - f"CDV {filename} row {row_num}: {row.address} out of range " - f"(valid: {min_addr}-{max_addr})" - ) - except ValueError: - issues.append(f"CDV {filename} row {row_num}: Invalid address number '{num_part}'") - - # 4. Check type code matches expected - expected_code = get_type_code_for_address(row.address) - if expected_code is not None and row.type_code != expected_code: - issues.append( - f"CDV {filename} row {row_num}: Type code mismatch for {row.address} " - f"(has {row.type_code}, expected {expected_code})" - ) - - # 5. If new_value is set, validate format - if row.new_value: - new_value_issues = _validate_cdv_new_value( - row.new_value, row.type_code, row.address, filename, row_num - ) - issues.extend(new_value_issues) - - # 6. If new_value is set, address must be writable - if not is_address_writable(row.address): - issues.append( - f"CDV {filename} row {row_num}: {row.address} has new_value " - f"but address is not writable" - ) - return issues @@ -392,29 +188,7 @@ def verify_cdv_files(project_path: Path | str) -> tuple[list[str], int]: Returns: Tuple of (issues list, files checked count) """ - from ..views.dataview_editor.cdv_file import ( - get_dataview_folder, - list_cdv_files, - ) - - issues: list[str] = [] - files_checked = 0 - - try: - dataview_folder = get_dataview_folder(project_path) - if not dataview_folder: - return issues, files_checked - - cdv_files = list_cdv_files(dataview_folder) - for cdv_path in cdv_files: - files_checked += 1 - file_issues = _verify_single_cdv(cdv_path) - issues.extend(file_issues) - - except Exception as e: - issues.append(f"CDV: Error accessing dataview folder - {e}") - - return issues, files_checked + return check_cdv_files(project_path) def run_verification( @@ -434,7 +208,7 @@ def run_verification( result.total_addresses = len(shared_data.all_rows) # Verify MDB addresses - result.mdb_issues, result.system_nickname_issues = verify_mdb_addresses(shared_data) + result.mdb_issues = verify_mdb_addresses(shared_data) # Verify CDV files result.cdv_issues, result.cdv_files_checked = verify_cdv_files(project_path) diff --git a/src/clicknick/views/address_editor/panel.py b/src/clicknick/views/address_editor/panel.py index 0490a18..a3b1564 100644 --- a/src/clicknick/views/address_editor/panel.py +++ b/src/clicknick/views/address_editor/panel.py @@ -10,14 +10,10 @@ from tkinter import ttk from typing import TYPE_CHECKING +from pyclickplc.banks import DATA_TYPE_HINTS, NON_EDITABLE_TYPES, DataType from tksheet import num2alpha from ...models.address_row import AddressRow -from ...models.constants import ( - DATA_TYPE_HINTS, - NON_EDITABLE_TYPES, - DataType, -) from ...utils.filters import text_matches_filter from ...widgets.char_limit_tooltip import CharLimitTooltip from .panel_constants import ( diff --git a/src/clicknick/views/address_editor/view_builder.py 
b/src/clicknick/views/address_editor/view_builder.py index 8ec87ca..7c3ea35 100644 --- a/src/clicknick/views/address_editor/view_builder.py +++ b/src/clicknick/views/address_editor/view_builder.py @@ -10,11 +10,10 @@ from dataclasses import dataclass, field from typing import TYPE_CHECKING -from ...models.address_row import AddressRow, get_addr_key, is_xd_yd_hidden_slot -from ...models.constants import ( - ADDRESS_RANGES, - PAIRED_RETENTIVE_TYPES, -) +from pyclickplc.addresses import get_addr_key, is_xd_yd_hidden_slot +from pyclickplc.banks import BANKS, PAIRED_RETENTIVE_TYPES + +from ...models.address_row import AddressRow from ...services.block_service import compute_all_block_ranges if TYPE_CHECKING: @@ -85,17 +84,24 @@ def build_single_type_rows( Returns: List of AddressRow references from the skeleton """ - start, end = ADDRESS_RANGES[mem_type] + bank = BANKS[mem_type] rows = [] - for addr in range(start, end + 1): - # Skip hidden XD/YD slots (odd addresses >= 3 are upper bytes not displayed) - if is_xd_yd_hidden_slot(mem_type, addr): - continue + # Use valid_ranges for sparse banks (X/Y), full range for contiguous + if bank.valid_ranges is not None: + ranges = bank.valid_ranges + else: + ranges = ((bank.min_addr, bank.max_addr),) + + for lo, hi in ranges: + for addr in range(lo, hi + 1): + # Skip hidden XD/YD slots (odd addresses >= 3 are upper bytes not displayed) + if is_xd_yd_hidden_slot(mem_type, addr): + continue - addr_key = get_addr_key(mem_type, addr) - if addr_key in all_rows: - rows.append(all_rows[addr_key]) # Reference, not copy + addr_key = get_addr_key(mem_type, addr) + if addr_key in all_rows: + rows.append(all_rows[addr_key]) # Reference, not copy return rows @@ -125,10 +131,10 @@ def build_interleaved_rows( all_starts = [] all_ends = [] for mem_type in types: - if mem_type in ADDRESS_RANGES: - start, end = ADDRESS_RANGES[mem_type] - all_starts.append(start) - all_ends.append(end) + if mem_type in BANKS: + bank = BANKS[mem_type] + all_starts.append(bank.min_addr) + all_ends.append(bank.max_addr) if not all_starts: return [] diff --git a/src/clicknick/views/address_editor/window.py b/src/clicknick/views/address_editor/window.py index 1ee7f26..86c9cbd 100644 --- a/src/clicknick/views/address_editor/window.py +++ b/src/clicknick/views/address_editor/window.py @@ -10,17 +10,19 @@ from pathlib import Path from tkinter import messagebox, ttk -from ...data.address_store import AddressStore -from ...data.data_source import CsvDataSource -from ...models.address_row import get_addr_key -from ...models.blocktag import ( +from pyclickplc.addresses import get_addr_key +from pyclickplc.banks import DataType +from pyclickplc.blocks import ( format_block_tag, + is_block_name_available, parse_block_tag, strip_block_tag, + validate_block_span, ) -from ...models.constants import DataType + +from ...data.address_store import AddressStore +from ...data.data_source import CsvDataSource from ...services import ImportService, RowService -from ...services.block_service import validate_block_span from ...utils.rename_helpers import build_rename_pattern from ...widgets.add_block_dialog import AddBlockDialog from ...widgets.custom_notebook import CustomNotebook @@ -640,8 +642,17 @@ def _on_add_block_clicked(self) -> None: ) return - # Show the Add Block dialog - dialog = AddBlockDialog(self) + # Create validation callback for duplicate block names + def validate_block_name(name: str) -> tuple[bool, str]: + view = self._store.get_unified_view() + if view is None: + return True, "" + if 
is_block_name_available(name, view.rows): + return True, "" + return False, f"Block name '{name}' already exists.\nBlock names must be unique." + + # Show the Add Block dialog with validation + dialog = AddBlockDialog(self, validate_name=validate_block_name) self.wait_window(dialog) if dialog.result is None: diff --git a/src/clicknick/views/dataview_editor/cdv_file.py b/src/clicknick/views/dataview_editor/cdv_file.py index f665be4..1ed0fb7 100644 --- a/src/clicknick/views/dataview_editor/cdv_file.py +++ b/src/clicknick/views/dataview_editor/cdv_file.py @@ -1,197 +1,52 @@ -"""CDV file I/O for CLICK PLC DataView files. - -CDV files are CSV files with UTF-16 encoding used by CLICK Programming Software -to store DataView configurations. Each file contains up to 100 monitored addresses. - -File format: -- Encoding: UTF-16 LE with BOM -- Line 1: Header - "0,0,0" (no new values) or "-1,0,0" (has new values) -- Lines 2-101: Data rows - "Address,TypeCode[,NewValue]" or ",0" for empty -""" +"""CDV file helpers for ClickNick DataView workflows.""" from __future__ import annotations from pathlib import Path -from ...models.dataview_row import ( - MAX_DATAVIEW_ROWS, - DataviewRow, - create_empty_dataview, - get_type_code_for_address, -) - - -def load_cdv(path: Path | str) -> tuple[list[DataviewRow], bool, str]: - """Load a CDV file. - - Args: - path: Path to the CDV file. - - Returns: - Tuple of (rows, has_new_values, header) where: - - rows: List of DataviewRow objects (always MAX_DATAVIEW_ROWS length) - - has_new_values: True if the dataview has new values set - - header: The original header line from the file +from pyclickplc.dataview import DataViewFile +from pyclickplc.dataview import check_cdv_file as check_cdv_file +from pyclickplc.dataview import read_cdv as read_cdv +from pyclickplc.dataview import write_cdv as write_cdv - Raises: - FileNotFoundError: If the file doesn't exist. - ValueError: If the file format is invalid. 
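Since the format description above is dropped from the module docstring, a tiny on-disk example of the layout it describes may help (file name and contents are illustrative; type codes are the BIT/INT values from the removed TypeCode table):

```python
from pathlib import Path

sample_cdv = "\n".join([
    "-1,0,0",      # header: -1 means at least one row carries a New Value
    "X001,768",    # BIT row (type code 768), no new value
    "DS1,0,42",    # INT row (type code 0) with new value 42 in storage format
    ",0",          # empty row placeholder; real files pad out to 100 data rows
]) + "\n"
Path("Dataview1.cdv").write_text(sample_cdv, encoding="utf-16")  # UTF-16 with BOM
```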
- """ - path = Path(path) - if not path.exists(): - raise FileNotFoundError(f"CDV file not found: {path}") - - # Read file with UTF-16 encoding - content = path.read_text(encoding="utf-16") - lines = content.strip().split("\n") - - if not lines: - raise ValueError(f"Empty CDV file: {path}") - - # Parse header line - preserve the original - header = lines[0].strip() - header_parts = [p.strip() for p in header.split(",")] - if len(header_parts) < 1: - raise ValueError(f"Invalid CDV header: {header}") - - # First value: 0 = no new values, -1 = has new values - try: - has_new_values = int(header_parts[0]) == -1 - except ValueError: - has_new_values = False - # Parse data rows - rows = create_empty_dataview() - data_lines = lines[1 : MAX_DATAVIEW_ROWS + 1] - - for i, line in enumerate(data_lines): - if i >= MAX_DATAVIEW_ROWS: - break - - line = line.strip() - if not line: - continue - - parts = [p.strip() for p in line.split(",")] - - # Empty row: ",0" or just "," - if not parts[0]: - continue - - # Parse address - address = parts[0] - rows[i].address = address - - # Parse type code - if len(parts) > 1 and parts[1]: - try: - rows[i].type_code = int(parts[1]) - except ValueError: - # Try to infer from address - code = get_type_code_for_address(address) - rows[i].type_code = code if code is not None else 0 - else: - # Infer type code from address - code = get_type_code_for_address(address) - rows[i].type_code = code if code is not None else 0 - - # Parse new value (if present and has_new_values flag is set) - if len(parts) > 2 and parts[2]: - rows[i].new_value = parts[2] - - return rows, has_new_values, header +def load_cdv(path: Path | str): + """Load wrapper returning legacy tuple shape for ClickNick internals.""" + dataview = read_cdv(path) + return dataview.rows, dataview.has_new_values, dataview.header def save_cdv( path: Path | str, - rows: list[DataviewRow], + rows, has_new_values: bool, header: str | None = None, ) -> None: - """Save a CDV file. - - Args: - path: Path to save the CDV file. - rows: List of DataviewRow objects (may exceed MAX_DATAVIEW_ROWS). - has_new_values: True if any rows have new values set. - header: Original header line to preserve. If None, uses default format. - - Note: - Only the first MAX_DATAVIEW_ROWS (100) rows are saved to maintain - file format compatibility. Overflow rows (index 100+) are not persisted. 
- """ - path = Path(path) - - # Build content - lines: list[str] = [] - - # Header line - use original if provided, otherwise build default - if header is not None: - lines.append(header) - else: - header_flag = -1 if has_new_values else 0 - lines.append(f"{header_flag},0,0") - - # Data rows - only save first MAX_DATAVIEW_ROWS - rows_to_save = list(rows[:MAX_DATAVIEW_ROWS]) - - # Pad with empty rows if needed to always have exactly 100 lines - while len(rows_to_save) < MAX_DATAVIEW_ROWS: - rows_to_save.append(DataviewRow()) - - for row in rows_to_save: - if row.is_empty: - lines.append(",0") - else: - if row.new_value: - lines.append(f"{row.address},{row.type_code},{row.new_value}") - else: - lines.append(f"{row.address},{row.type_code}") - - # Join with newlines and add trailing newline - content = "\n".join(lines) + "\n" - - # Write with UTF-16 encoding (includes BOM automatically) - path.write_text(content, encoding="utf-16") + """Save wrapper accepting legacy args for ClickNick internals.""" + dataview = DataViewFile( + rows=rows, + has_new_values=has_new_values, + header=header or f"{-1 if has_new_values else 0},0,0", + ) + write_cdv(path, dataview) def export_cdv( path: Path | str, - rows: list[DataviewRow], + rows, has_new_values: bool, header: str | None = None, ) -> None: - """Export a CDV file to a new location. - - This is identical to save_cdv but semantically indicates exporting - rather than saving to the original location. - - Args: - path: Path to export the CDV file. - rows: List of DataviewRow objects. - has_new_values: True if any rows have new values set. - header: Original header line to preserve. If None, uses default format. - """ + """Export a CDV file to a new location.""" save_cdv(path, rows, has_new_values, header) def get_dataview_folder(project_path: Path | str) -> Path | None: - """Get the DataView folder for a CLICK project. - - The DataView folder is located at: {project_path}/CLICK ({unique_id})/DataView - where {unique_id} is a hex identifier like "00010A98". - - Args: - project_path: Path to the CLICK project folder. - - Returns: - Path to the DataView folder, or None if not found. - """ + """Get the DataView folder for a CLICK project.""" project_path = Path(project_path) if not project_path.is_dir(): return None - # Look for CLICK (*) subdirectory for child in project_path.iterdir(): if child.is_dir() and child.name.startswith("CLICK ("): dataview_path = child / "DataView" @@ -202,16 +57,27 @@ def get_dataview_folder(project_path: Path | str) -> Path | None: def list_cdv_files(dataview_folder: Path | str) -> list[Path]: - """List all CDV files in a DataView folder. - - Args: - dataview_folder: Path to the DataView folder. - - Returns: - List of Path objects for each CDV file, sorted by name. 
- """ + """List all CDV files in a DataView folder sorted case-insensitively.""" folder = Path(dataview_folder) if not folder.is_dir(): return [] - return sorted(folder.glob("*.cdv"), key=lambda p: p.stem.lower()) + + +def check_cdv_files(project_path: Path | str) -> tuple[list[str], int]: + """Verify all CDV files in a project's DataView folder.""" + issues: list[str] = [] + files_checked = 0 + + try: + dataview_folder = get_dataview_folder(project_path) + if dataview_folder is None: + return issues, files_checked + + for cdv_path in list_cdv_files(dataview_folder): + files_checked += 1 + issues.extend(check_cdv_file(cdv_path)) + except Exception as exc: + issues.append(f"CDV: Error accessing dataview folder - {exc}") + + return issues, files_checked diff --git a/src/clicknick/views/dataview_editor/panel.py b/src/clicknick/views/dataview_editor/panel.py index 1377dc3..04ab69b 100644 --- a/src/clicknick/views/dataview_editor/panel.py +++ b/src/clicknick/views/dataview_editor/panel.py @@ -8,28 +8,41 @@ import os import tkinter as tk -from collections.abc import Callable +from collections.abc import Callable, Mapping from pathlib import Path from tkinter import messagebox, ttk -from tksheet import Sheet - -from ...models.dataview_row import ( +from pyclickplc.addresses import normalize_address +from pyclickplc.dataview import ( MAX_DATAVIEW_ROWS, - DataviewRow, + DataViewFile, + DataViewRecord as DataViewRecord, create_empty_dataview, - get_type_code_for_address, + get_data_type_for_address, ) +from tksheet import Sheet + from .cdv_file import load_cdv, save_cdv # Column indices COL_ADDRESS = 0 COL_NICKNAME = 1 COL_COMMENT = 2 +COL_NEW_VALUE = 3 +COL_WRITE = 4 +COL_LIVE = 5 + +NUM_COLUMNS = 6 + +PlcValue = bool | int | float | str # Color for overflow rows (index >= 100) COLOR_OVERFLOW_BG = "#e0e0e0" +# Highlight for live value changes +COLOR_LIVE_FLASH = "#90EE90" # Light green +LIVE_FLASH_DURATION_MS = 1500 + # File monitoring interval in milliseconds FILE_MONITOR_INTERVAL_MS = 2000 @@ -37,13 +50,71 @@ class DataviewPanel(ttk.Frame): """Panel for editing a single DataView's addresses. - Displays rows with columns: Address, Nickname, Comment. + Displays rows with columns: + Address, Nickname, Comment, New Value, Write, Live. Target is 100 rows, but supports overflow (rows 100+) with grey background. Overflow rows are excluded from saves. Supports row reordering, insert/delete, cut/copy/paste, and address autocomplete. - New Value data is preserved internally for saving but not displayed. + New Value data is preserved internally and shown using DataViewFile helpers. 
""" + def _canonical_address(self, address: str) -> str | None: + """Normalize an address for poll/write payloads.""" + candidate = (address or "").strip() + if not candidate: + return None + normalized = self.address_normalizer(candidate) if self.address_normalizer else None + if not normalized: + normalized = normalize_address(candidate) + if not normalized: + return None + return normalized.upper() + + @staticmethod + def _display_text(value: object) -> str: + if value is None: + return "" + return str(value).strip() + + def _is_row_write_enabled(self, row: DataViewRecord) -> bool: + """True when write controls should be interactive for a row.""" + return bool(row.address.strip()) and row.is_writable + + def _new_value_display(self, row: DataViewRecord) -> str: + return DataViewFile.value_to_display(row.new_value, row.data_type) + + def _live_value_display(self, row: DataViewRecord) -> str: + address = self._canonical_address(row.address) + if not address: + return "" + value = self._live_values.get(address) + return DataViewFile.value_to_display(value, row.data_type) + + def _sync_write_checkbox(self, row_idx: int) -> None: + """Refresh write checkbox cell for a row.""" + row = self.rows[row_idx] + is_enabled = self._is_row_write_enabled(row) + + # Keep checked state only when writes are allowed. + if not is_enabled: + self._write_checks[row_idx] = False + + try: + self.sheet.delete_checkbox(row_idx, COL_WRITE) + except Exception: + pass + + if is_enabled: + checked = bool(self._write_checks[row_idx]) + self.sheet.create_checkbox(r=row_idx, c=COL_WRITE, checked=checked, text="") + self.sheet.set_cell_data(row_idx, COL_WRITE, checked) + else: + self.sheet.set_cell_data(row_idx, COL_WRITE, "") + + def _notify_addresses_changed(self) -> None: + if self.on_addresses_changed: + self.on_addresses_changed(self) + def _refresh_row_indices(self) -> None: """Refresh row index labels (1, 2, 3, ...).""" self.sheet.set_index_data([str(i + 1) for i in range(len(self.rows))]) @@ -54,7 +125,7 @@ def _apply_overflow_styling(self) -> None: for i in range(len(self.rows)): if i >= MAX_DATAVIEW_ROWS: # Apply grey background to all columns - for col in range(3): # COL_ADDRESS, COL_NICKNAME, COL_COMMENT + for col in range(NUM_COLUMNS): self.sheet.highlight_cells( row=i, column=col, @@ -72,11 +143,23 @@ def _populate_sheet(self) -> None: self._suppress_notifications = True try: data = [] - for row in self.rows: - # Build display row (New Value is kept internally but not displayed) - data.append([row.address, row.nickname, row.comment]) + for i, row in enumerate(self.rows): + is_enabled = self._is_row_write_enabled(row) + checked = bool(self._write_checks[i]) if is_enabled else "" + data.append( + [ + row.address, + row.nickname, + row.comment, + self._new_value_display(row), + checked, + self._live_value_display(row), + ] + ) self.sheet.set_sheet_data(data, reset_col_positions=False) + for i in range(len(self.rows)): + self._sync_write_checkbox(i) # Set row index labels (1 to N) self._refresh_row_indices() @@ -90,10 +173,12 @@ def _update_row_display(self, row_idx: int) -> None: """Update display for a single row.""" row = self.rows[row_idx] - # Update cells (New Value is kept internally but not displayed) self.sheet.set_cell_data(row_idx, COL_ADDRESS, row.address) self.sheet.set_cell_data(row_idx, COL_NICKNAME, row.nickname) self.sheet.set_cell_data(row_idx, COL_COMMENT, row.comment) + self.sheet.set_cell_data(row_idx, COL_NEW_VALUE, self._new_value_display(row)) + self.sheet.set_cell_data(row_idx, COL_LIVE, 
self._live_value_display(row)) + self._sync_write_checkbox(row_idx) def _update_status(self) -> None: """Update the status label.""" @@ -126,6 +211,7 @@ def _on_sheet_modified(self, event) -> None: return data_changed = False + addresses_changed = False for (row_idx, col), _old_value in table_cells.items(): if row_idx >= len(self.rows): @@ -135,11 +221,12 @@ def _on_sheet_modified(self, event) -> None: new_value = self.sheet.get_cell_data(row_idx, col) if col == COL_ADDRESS: + old_address = row.address new_address = (new_value or "").strip() if new_address != row.address: row.address = new_address - row.update_type_code() + row.update_data_type() # Lookup nickname and comment if new_address and self.nickname_lookup: @@ -153,19 +240,55 @@ def _on_sheet_modified(self, event) -> None: row.nickname = "" row.comment = "" - # Clear new value if address changed and not writable - if not row.is_writable: - row.new_value = "" + # Clear stale runtime state when writability/address changes. + if old_address != row.address: + old_canonical = self._canonical_address(old_address) + if old_canonical: + self._live_values.pop(old_canonical, None) + + row.new_value = None + self._write_checks[row_idx] = False + if not self._is_row_write_enabled(row): + row.new_value = None # Update display for this row self._update_row_display(row_idx) data_changed = True + addresses_changed = True + + elif col == COL_NEW_VALUE: + if not self._is_row_write_enabled(row): + self._update_row_display(row_idx) + continue + + display_text = self._display_text(new_value) + old_native = row.new_value + try: + DataViewFile.set_row_new_value_from_display(row, display_text) + except ValueError: + self._update_row_display(row_idx) + continue + + normalized = self._new_value_display(row) + if self.sheet.get_cell_data(row_idx, COL_NEW_VALUE) != normalized: + self.sheet.set_cell_data(row_idx, COL_NEW_VALUE, normalized) + if row.new_value != old_native: + data_changed = True + + elif col == COL_WRITE: + if self._is_row_write_enabled(row): + self._write_checks[row_idx] = bool(new_value) + else: + self._write_checks[row_idx] = False + self._sync_write_checkbox(row_idx) if data_changed: self._is_dirty = True self._update_status() if self.on_modified: self.on_modified() + if addresses_changed: + self._notify_addresses_changed() def _on_rows_moved(self, event) -> None: """Handle row drag-and-drop reordering. @@ -188,27 +311,37 @@ def _on_rows_moved(self, event) -> None: # 2. Create the "Skeleton" List # Place the moved items exactly where they belong. new_rows = [None] * len(self.rows) + new_checks: list[bool | None] = [None] * len(self._write_checks) for old_idx, new_idx in mapping.items(): new_rows[new_idx] = self.rows[old_idx] + new_checks[new_idx] = self._write_checks[old_idx] # 3. Collect the Unmoved Rows # Identify rows that weren't moved and reverse them so we can .pop() efficiently. remaining = [row for i, row in enumerate(self.rows) if i not in mapping] remaining.reverse() + remaining_checks = [check for i, check in enumerate(self._write_checks) if i not in mapping] + remaining_checks.reverse() # 4. Fill the Gaps # List comprehension: If the slot has a moved row, keep it. # If it's None, pop the next available unmoved row into it. 
self.rows = [row if row is not None else remaining.pop() for row in new_rows] + self._write_checks = [ + check if check is not None else remaining_checks.pop() for check in new_checks + ] self._is_dirty = True self._update_status() self._refresh_row_indices() self._apply_overflow_styling() + for i in range(len(self.rows)): + self._sync_write_checkbox(i) self.sheet.refresh(redraw_header=False, redraw_row_index=True) if self.on_modified: self.on_modified() + self._notify_addresses_changed() def _trim_empty_rows_from_bottom(self) -> None: """Remove empty rows from bottom until total <= MAX_DATAVIEW_ROWS or no more empty. @@ -222,6 +355,7 @@ def _trim_empty_rows_from_bottom(self) -> None: if self.rows[i].is_empty: # Delete from data model only (sheet already has the row) del self.rows[i] + del self._write_checks[i] # Delete from sheet self.sheet.delete_rows([i], emit_event=False) found_empty = True @@ -249,9 +383,10 @@ def _on_rows_added(self, event) -> None: data_idx = added.get("data_index", 0) num_added = added.get("num", 1) - # Insert new DataviewRow objects at the insertion point + # Insert new DataViewRecord objects at the insertion point for i in range(num_added): - self.rows.insert(data_idx + i, DataviewRow()) + self.rows.insert(data_idx + i, DataViewRecord()) + self._write_checks.insert(data_idx + i, False) # Sync data model with sheet data (for paste operations with data) for i in range(num_added): @@ -260,7 +395,7 @@ def _on_rows_added(self, event) -> None: address = self.sheet.get_cell_data(row_idx, COL_ADDRESS) or "" if address: self.rows[row_idx].address = address.strip().upper() - self.rows[row_idx].update_type_code() + self.rows[row_idx].update_data_type() if self.nickname_lookup: result = self.nickname_lookup(self.rows[row_idx].address) if result: @@ -270,25 +405,23 @@ def _on_rows_added(self, event) -> None: self._trim_empty_rows_from_bottom() # Update display - self._refresh_row_indices() - self._apply_overflow_styling() + self._populate_sheet() self._is_dirty = True self._update_status() if self.on_modified: self.on_modified() + self._notify_addresses_changed() def _pad_to_target(self) -> None: """Add empty rows at bottom until exactly MAX_DATAVIEW_ROWS rows exist.""" while len(self.rows) < MAX_DATAVIEW_ROWS: - self.rows.append(DataviewRow()) + self.rows.append(DataViewRecord()) + self._write_checks.append(False) # Add row to sheet at end row_idx = len(self.rows) - 1 self.sheet.insert_rows(rows=1, idx=row_idx, emit_event=False) - # Set empty row data - self.sheet.set_cell_data(row_idx, COL_ADDRESS, "") - self.sheet.set_cell_data(row_idx, COL_NICKNAME, "") - self.sheet.set_cell_data(row_idx, COL_COMMENT, "") + self._update_row_display(row_idx) def _on_rows_deleted(self, event) -> None: """Handle row deletion events. 
@@ -304,24 +437,25 @@ def _on_rows_deleted(self, event) -> None: if not deleted: return - # Remove DataviewRow objects that were deleted + # Remove DataViewRecord objects that were deleted # The 'deleted' dict maps old indices to row data deleted_indices = sorted(deleted.keys(), reverse=True) for idx in deleted_indices: if isinstance(idx, int) and idx < len(self.rows): del self.rows[idx] + del self._write_checks[idx] # Auto-pad: add empty rows if under 100 self._pad_to_target() # Update display - self._refresh_row_indices() - self._apply_overflow_styling() + self._populate_sheet() self._is_dirty = True self._update_status() if self.on_modified: self.on_modified() + self._notify_addresses_changed() def _validate_edit(self, event) -> str: """Validate and normalize cell edits. @@ -348,18 +482,37 @@ def _validate_edit(self, event) -> str: # Fallback: basic validation without normalization normalized = value.upper() - if get_type_code_for_address(normalized) is None: + if get_data_type_for_address(normalized) is None: return "" return normalized + if hasattr(event, "column") and event.column == COL_NEW_VALUE: + row_idx = getattr(event, "row", None) + if row_idx is None or row_idx >= len(self.rows): + return event.value + + row = self.rows[row_idx] + if not self._is_row_write_enabled(row): + return self._new_value_display(row) + + display_text = self._display_text(event.value) + ok, _error = DataViewFile.validate_row_display(row, display_text) + if not ok: + return self._new_value_display(row) + + parsed = DataViewFile.try_parse_display(display_text, row.data_type) + if not parsed.ok: + return self._new_value_display(row) + return DataViewFile.value_to_display(parsed.value, row.data_type) + return event.value def _create_widgets(self) -> None: """Create all panel widgets.""" - # Table (tksheet) - New Value is kept internally but not displayed + # Table (tksheet) self.sheet = Sheet( self, - headers=["Address", "Nickname", "Comment"], + headers=["Address", "Nickname", "Comment", "New Value", "Write", "Live"], show_row_index=True, height=400, width=700, @@ -388,11 +541,11 @@ def _create_widgets(self) -> None: ) # Set column widths - self.sheet.set_column_widths([80, 180, 280]) + self.sheet.set_column_widths([90, 170, 220, 120, 70, 120]) self.sheet.row_index(40) # Row index width (row numbers) - # Make Nickname and Comment columns read-only (populated from address lookup) - self.sheet.readonly_columns([COL_NICKNAME, COL_COMMENT]) + # Read-only display columns. 
+ self.sheet.readonly_columns([COL_NICKNAME, COL_COMMENT, COL_LIVE]) # Set up edit validation and bind to modification events self.sheet.edit_validation(self._validate_edit) @@ -424,6 +577,8 @@ def _load_file(self) -> None: try: self.rows, self.has_new_values, self._header = load_cdv(self.file_path) + self._write_checks = [False] * len(self.rows) + self._live_values.clear() # Populate nicknames and comments from lookup if self.nickname_lookup: @@ -453,6 +608,7 @@ def __init__( parent: tk.Widget, file_path: Path | None = None, on_modified: Callable[[], None] | None = None, + on_addresses_changed: Callable[[DataviewPanel], None] | None = None, nickname_lookup: Callable[[str], tuple[str, str] | None] | None = None, address_normalizer: Callable[[str], str | None] | None = None, name: str | None = None, @@ -463,6 +619,7 @@ def __init__( parent: Parent widget file_path: Path to the CDV file (None for new unsaved dataview) on_modified: Callback when data is modified + on_addresses_changed: Callback when row addresses change nickname_lookup: Callback to lookup (nickname, comment) for an address address_normalizer: Callback to normalize address to canonical form (e.g., "x1" -> "X001") name: Custom name for new unsaved dataviews (used instead of "Untitled") @@ -471,19 +628,25 @@ def __init__( self.file_path = file_path self.on_modified = on_modified + self.on_addresses_changed = on_addresses_changed self._custom_name = name self.nickname_lookup = nickname_lookup self.address_normalizer = address_normalizer # Data model - self.rows: list[DataviewRow] = create_empty_dataview() + self.rows: list[DataViewRecord] = create_empty_dataview() self.has_new_values = False self._header: str | None = None # Original header from file self._is_dirty = False + self._write_checks: list[bool] = [False] * len(self.rows) + self._live_values: dict[str, PlcValue] = {} # Suppress notifications during programmatic updates self._suppress_notifications = False + # Live value flash highlight tracking: row_idx -> after_id + self._live_flash_pending: dict[int, str] = {} + # File monitoring state self._last_mtime: float = 0.0 self._monitor_after_id: str | None = None @@ -514,7 +677,9 @@ def save(self) -> bool: try: # Check if any rows in the saveable range have new values saveable_rows = self.rows[:MAX_DATAVIEW_ROWS] - self.has_new_values = any(r.new_value for r in saveable_rows if not r.is_empty) + self.has_new_values = any( + r.new_value is not None for r in saveable_rows if not r.is_empty + ) save_cdv(self.file_path, self.rows, self.has_new_values, self._header) self._is_dirty = False self._update_status() @@ -583,6 +748,8 @@ def _reload_file(self) -> None: try: self.rows, self.has_new_values, self._header = load_cdv(self.file_path) + self._write_checks = [False] * len(self.rows) + self._live_values.clear() # Populate nicknames and comments from lookup if self.nickname_lookup: @@ -680,8 +847,8 @@ def _insert_address_at(self, address: str, idx: int) -> None: self._suppress_notifications = True try: # Create new row - new_row = DataviewRow(address=address) - new_row.update_type_code() + new_row = DataViewRecord(address=address) + new_row.update_data_type() # Lookup nickname and comment if self.nickname_lookup: @@ -691,19 +858,16 @@ def _insert_address_at(self, address: str, idx: int) -> None: # Insert into data model self.rows.insert(idx, new_row) + self._write_checks.insert(idx, False) # Insert into sheet self.sheet.insert_rows(rows=1, idx=idx, emit_event=False) - self.sheet.set_cell_data(idx, COL_ADDRESS, new_row.address) - 
self.sheet.set_cell_data(idx, COL_NICKNAME, new_row.nickname) - self.sheet.set_cell_data(idx, COL_COMMENT, new_row.comment) # Auto-pad: trim empty rows from bottom if over 100 self._trim_empty_rows_from_bottom() # Update display - self._refresh_row_indices() - self._apply_overflow_styling() + self._populate_sheet() self._is_dirty = True self._update_status() @@ -726,12 +890,13 @@ def _insert_address_at(self, address: str, idx: int) -> None: if self.on_modified: self.on_modified() + self._notify_addresses_changed() def _fill_row_at(self, address: str, idx: int) -> None: """Fill an existing empty row with the given address (no insertion).""" row = self.rows[idx] row.address = address - row.update_type_code() + row.update_data_type() # Lookup nickname and comment if self.nickname_lookup: @@ -750,6 +915,7 @@ def _fill_row_at(self, address: str, idx: int) -> None: if self.on_modified: self.on_modified() + self._notify_addresses_changed() def _get_selected_row_index(self) -> int | None: """Get the index of the currently selected row, if any. @@ -827,18 +993,133 @@ def clear_row(self, row_idx: int) -> None: row_idx: Index of the row to clear (0-99) """ if 0 <= row_idx < len(self.rows): + old_canonical = self._canonical_address(self.rows[row_idx].address) self.rows[row_idx].clear() + self._write_checks[row_idx] = False + if old_canonical: + self._live_values.pop(old_canonical, None) self._update_row_display(row_idx) self._is_dirty = True self._update_status() if self.on_modified: self.on_modified() + self._notify_addresses_changed() def get_selected_rows(self) -> list[int]: """Get indices of currently selected rows.""" return list(self.sheet.get_selected_rows()) + def get_poll_addresses(self) -> list[str]: + """Return valid unique poll addresses in table order.""" + seen: set[str] = set() + addresses: list[str] = [] + for row in self.rows: + canonical = self._canonical_address(row.address) + if not canonical or canonical in seen: + continue + seen.add(canonical) + addresses.append(canonical) + return addresses + + def _flash_live_cell(self, row_idx: int) -> None: + """Briefly highlight a Live cell to indicate a value change.""" + # Cancel any existing pending clear for this row + if row_idx in self._live_flash_pending: + try: + self.after_cancel(self._live_flash_pending[row_idx]) + except Exception: + pass + + self.sheet.highlight_cells(row=row_idx, column=COL_LIVE, bg=COLOR_LIVE_FLASH) + + def _clear(idx: int = row_idx) -> None: + self._live_flash_pending.pop(idx, None) + try: + self.sheet.dehighlight_cells(row=idx, column=COL_LIVE) + except Exception: + pass + + after_id = self.after(LIVE_FLASH_DURATION_MS, _clear) + self._live_flash_pending[row_idx] = after_id + + def update_live_values(self, values: Mapping[str, PlcValue]) -> None: + """Update the Live column from a service callback payload.""" + normalized: dict[str, PlcValue] = {} + for address, value in values.items(): + canonical = self._canonical_address(address) + if canonical: + normalized[canonical] = value + self._live_values = normalized + + for row_idx, row in enumerate(self.rows): + new_display = self._live_value_display(row) + old_display = self.sheet.get_cell_data(row_idx, COL_LIVE) + self.sheet.set_cell_data(row_idx, COL_LIVE, new_display) + + # Flash cell if value actually changed and is non-empty + if new_display and new_display != old_display: + self._flash_live_cell(row_idx) + + def clear_live_values(self) -> None: + """Clear the Live column for all rows.""" + self._live_values.clear() + # Cancel all pending flash clears + 
for after_id in self._live_flash_pending.values(): + try: + self.after_cancel(after_id) + except Exception: + pass + self._live_flash_pending.clear() + for row_idx in range(len(self.rows)): + self.sheet.set_cell_data(row_idx, COL_LIVE, "") + try: + self.sheet.dehighlight_cells(row=row_idx, column=COL_LIVE) + except Exception: + pass + + def get_write_rows(self) -> list[tuple[str, PlcValue]]: + """Return checked writable rows with values.""" + payload: list[tuple[str, PlcValue]] = [] + for row_idx, row in enumerate(self.rows): + if not self._write_checks[row_idx]: + continue + if not self._is_row_write_enabled(row): + continue + if row.new_value is None: + continue + canonical = self._canonical_address(row.address) + if not canonical: + continue + payload.append((canonical, row.new_value)) + return payload + + def get_write_all_rows(self) -> list[tuple[str, PlcValue]]: + """Return all writable rows with non-empty values.""" + payload: list[tuple[str, PlcValue]] = [] + for row in self.rows: + if not self._is_row_write_enabled(row) or row.new_value is None: + continue + canonical = self._canonical_address(row.address) + if not canonical: + continue + payload.append((canonical, row.new_value)) + return payload + + def clear_write_checks(self) -> None: + """Clear all Write checkbox states.""" + for row_idx in range(len(self._write_checks)): + self._write_checks[row_idx] = False + self._sync_write_checkbox(row_idx) + + def set_modbus_columns_visible(self, visible: bool) -> None: + """Show or hide the Modbus-related columns (New Value, Write, Live).""" + columns = {COL_NEW_VALUE, COL_WRITE, COL_LIVE} + if visible: + self.sheet.show_columns(columns=columns) + else: + self.sheet.hide_columns(columns=columns, data_indexes=True) + def refresh_nicknames(self) -> None: """Refresh all nickname and comment lookups.""" if not self.nickname_lookup: diff --git a/src/clicknick/views/dataview_editor/window.py b/src/clicknick/views/dataview_editor/window.py index a5a355d..c0cb330 100644 --- a/src/clicknick/views/dataview_editor/window.py +++ b/src/clicknick/views/dataview_editor/window.py @@ -6,14 +6,18 @@ from __future__ import annotations import os +import threading import tkinter as tk +from collections.abc import Callable, Mapping from pathlib import Path from tkinter import filedialog, messagebox, ttk from typing import TYPE_CHECKING +from pyclickplc import ConnectionState, ModbusService +from pyclickplc.addresses import get_addr_key +from pyclickplc.banks import INTERLEAVED_PAIRS + from ...data.shared_dataview import SharedDataviewData -from ...models.address_row import get_addr_key -from ...models.constants import INTERLEAVED_PAIRS from ...widgets.custom_notebook import CustomNotebook from ...widgets.new_dataview_dialog import NewDataviewDialog from ...widgets.nickname_combobox import NicknameCombobox @@ -24,6 +28,9 @@ pass +PlcValue = bool | int | float | str + + class DataviewEditorWindow(tk.Toplevel): """Main window for the Dataview Editor. 
@@ -41,8 +48,8 @@ def _setup_window(self) -> None: else: self.title(base_title) - self.geometry("900x600") - self.minsize(600, 400) + self.geometry("800x600") + self.minsize(800, 400) def _refresh_file_list(self) -> None: """Refresh the file list from the dataview folder.""" @@ -81,6 +88,331 @@ def _on_panel_modified(self) -> None: if panel: self._update_tab_title(panel) + def _iter_open_panels(self) -> list[DataviewPanel]: + """Return panel widgets currently attached to notebook tabs.""" + panels: list[DataviewPanel] = [] + try: + for tab_id in self.notebook.tabs(): + panel = self.notebook.nametowidget(tab_id) + if isinstance(panel, DataviewPanel): + panels.append(panel) + except tk.TclError: + pass + return panels + + def _is_modbus_connected(self) -> bool: + return self._connection_state == ConnectionState.CONNECTED + + def _set_modbus_error_text(self, text: str = "") -> None: + if text: + messagebox.showerror("Modbus Error", text, parent=self) + + def _update_modbus_controls(self) -> None: + """Refresh connection/write control state.""" + connected = self._is_modbus_connected() + connecting = self._connection_state == ConnectionState.CONNECTING + busy = self._modbus_busy + write_busy = self._modbus_write_busy + action_busy = busy or write_busy + + if connecting: + self._modbus_toggle_var.set("Connecting…") + elif connected and busy: + self._modbus_toggle_var.set("Disconnecting…") + elif connected: + self._modbus_toggle_var.set("Disconnect") + else: + self._modbus_toggle_var.set("Connect") + self.write_checked_button.config( + state=(tk.NORMAL if connected and not action_busy else tk.DISABLED) + ) + self.write_all_button.config( + state=(tk.NORMAL if connected and not action_busy else tk.DISABLED) + ) + self.modbus_connect_button.config( + state=(tk.DISABLED if connecting or action_busy else tk.NORMAL) + ) + self.host_entry.config( + state=(tk.DISABLED if connected or connecting or action_busy else tk.NORMAL) + ) + self.port_entry.config( + state=(tk.DISABLED if connected or connecting or action_busy else tk.NORMAL) + ) + + if hasattr(self, "connection_menu"): + self.connection_menu.entryconfig( + "Connect", state=(tk.DISABLED if connected or action_busy else tk.NORMAL) + ) + self.connection_menu.entryconfig( + "Disconnect", state=(tk.NORMAL if connected and not action_busy else tk.DISABLED) + ) + + @staticmethod + def _run_background(target, *args) -> None: + """Run a Modbus action in a daemon worker thread.""" + threading.Thread(target=target, args=args, daemon=True).start() + + def _schedule_ui(self, callback: Callable[[], None]) -> None: + """Schedule callback on Tk thread; ignore if window is already destroyed.""" + try: + self.after(0, callback) + except tk.TclError: + pass + + def _run_modbus_action( + self, + action: Callable[[], object | None], + on_complete: Callable[[object | None, Exception | None], None], + ) -> None: + """Execute blocking Modbus action off UI thread and marshal completion to Tk.""" + + def _worker() -> None: + result: object | None = None + error: Exception | None = None + try: + result = action() + except Exception as exc: + error = exc + self._schedule_ui(lambda: on_complete(result, error)) + + self._run_background(_worker) + + def _apply_modbus_state(self, state: ConnectionState, error: Exception | None) -> None: + self._connection_state = state + + if error is not None: + self._set_modbus_error_text(str(error)) + + self._update_modbus_controls() + + def _on_modbus_state_callback(self, state: ConnectionState, error: Exception | None) -> None: + 
"""Background thread callback from ModbusService.""" + self._schedule_ui(lambda: self._apply_modbus_state(state, error)) + + def _apply_modbus_values(self, values: Mapping[str, PlcValue]) -> None: + panel = self._get_current_panel() + if panel is not None: + panel.update_live_values(values) + + def _on_modbus_values_callback(self, values: Mapping[str, PlcValue]) -> None: + """Background thread callback from ModbusService.""" + self._schedule_ui(lambda: self._apply_modbus_values(values)) + + def _ensure_modbus_service(self) -> ModbusService: + """Create ModbusService on first use.""" + if self._modbus is None: + self._modbus = ModbusService( + on_state=self._on_modbus_state_callback, + on_values=self._on_modbus_values_callback, + ) + return self._modbus + + def _sync_poll_addresses_from_active_tab(self, *, force: bool = False) -> None: + """Replace service poll list from the currently active tab.""" + if self._modbus is None: + return + if not force and not self._is_modbus_connected(): + return + + panel = self._get_current_panel() + self._active_panel = panel + + if panel is None: + self._modbus.clear_poll_addresses() + return + + addresses = panel.get_poll_addresses() + if addresses: + self._modbus.set_poll_addresses(addresses) + else: + self._modbus.clear_poll_addresses() + + def _clear_live_values_all_panels(self) -> None: + for panel in self._iter_open_panels(): + panel.clear_live_values() + + def _on_panel_addresses_changed(self, panel: DataviewPanel) -> None: + """Refresh poll list when active tab addresses change.""" + if panel is not self._get_current_panel(): + return + self._sync_poll_addresses_from_active_tab() + + def _parse_host_port(self) -> tuple[str, int] | None: + host = self._modbus_host_var.get().strip() + port_text = self._modbus_port_var.get().strip() + + if not host: + self._set_modbus_error_text("Host is required.") + return None + + try: + port = int(port_text) + except ValueError: + self._set_modbus_error_text("Port must be an integer.") + return None + + if not 1 <= port <= 65535: + self._set_modbus_error_text("Port must be between 1 and 65535.") + return None + + return host, port + + def _on_connect_modbus_complete(self, error: Exception | None) -> None: + self._modbus_busy = False + if error is not None: + self._connection_state = ConnectionState.ERROR + + self._set_modbus_error_text(str(error)) + self._update_modbus_controls() + return + + self._connection_state = ConnectionState.CONNECTED + self._set_modbus_error_text("") + self._sync_poll_addresses_from_active_tab(force=True) + self._update_modbus_controls() + + def _connect_modbus(self) -> None: + if self._modbus_busy: + return + + endpoint = self._parse_host_port() + if endpoint is None: + return + + host, port = endpoint + service = self._ensure_modbus_service() + self._modbus_busy = True + self._connection_state = ConnectionState.CONNECTING + self._set_modbus_error_text("") + self._update_modbus_controls() + + self._run_modbus_action( + lambda: service.connect(host, port), + lambda _result, error: self._on_connect_modbus_complete(error), + ) + + @staticmethod + def _disconnect_modbus_service(service: ModbusService) -> Exception | None: + error: Exception | None = None + try: + service.clear_poll_addresses() + except Exception as exc: + error = exc + try: + service.disconnect() + except Exception as exc: + if error is None: + error = exc + return error + + def _on_disconnect_modbus_complete(self, error: Exception | None) -> None: + self._modbus_busy = False + self._connection_state = 
ConnectionState.DISCONNECTED + self._set_modbus_error_text("" if error is None else str(error)) + self._update_modbus_controls() + + def _on_disconnect_modbus_action_complete( + self, + result: object | None, + error: Exception | None, + ) -> None: + final_error = error + if final_error is None and isinstance(result, Exception): + final_error = result + self._on_disconnect_modbus_complete(final_error) + + def _disconnect_modbus(self) -> None: + if self._modbus_busy: + return + if self._modbus_write_busy: + return + + service = self._modbus + self._modbus_busy = True + self._set_modbus_error_text("") + self._clear_live_values_all_panels() + self._update_modbus_controls() + + if service is None: + self._on_disconnect_modbus_complete(None) + return + + self._run_modbus_action( + lambda: self._disconnect_modbus_service(service), + self._on_disconnect_modbus_action_complete, + ) + + def _toggle_modbus_connection(self) -> None: + if self._is_modbus_connected(): + self._disconnect_modbus() + else: + self._connect_modbus() + + def _execute_write_payload(self, rows: list[tuple[str, PlcValue]]): + if self._modbus is None: + raise OSError("Not connected") + return self._modbus.write(rows) + + def _on_write_payload_complete( + self, + panel: DataviewPanel, + results, + error: Exception | None, + ) -> None: + self._modbus_write_busy = False + if error is not None: + # Transport-level failure — assume connection is lost. + self._connection_state = ConnectionState.DISCONNECTED + self._clear_live_values_all_panels() + self._set_modbus_error_text(str(error)) + self._update_modbus_controls() + return + + results = results or [] + if results and all(result.get("ok") for result in results): + if panel in self._iter_open_panels(): + panel.clear_write_checks() + self._set_modbus_error_text("") + else: + first_error = next( + (result.get("error") for result in results if not result.get("ok")), None + ) + if first_error: + self._set_modbus_error_text(str(first_error)) + self._update_modbus_controls() + + def _write_payload(self, rows: list[tuple[str, PlcValue]]) -> None: + panel = self._get_current_panel() + if ( + panel is None + or not rows + or self._modbus is None + or not self._is_modbus_connected() + or self._modbus_busy + or self._modbus_write_busy + ): + return + + self._modbus_write_busy = True + self._set_modbus_error_text("") + self._update_modbus_controls() + self._run_modbus_action( + lambda: self._execute_write_payload(rows), + lambda result, error: self._on_write_payload_complete(panel, result, error), + ) + + def _write_checked(self) -> None: + panel = self._get_current_panel() + if panel is None: + return + self._write_payload(panel.get_write_rows()) + + def _write_all(self) -> None: + panel = self._get_current_panel() + if panel is None: + return + self._write_payload(panel.get_write_all_rows()) + def _open_dataview(self, file_path: Path) -> None: """Open a dataview file in a new tab. 
@@ -102,10 +434,15 @@ def _open_dataview(self, file_path: Path) -> None: self.notebook, file_path=file_path, on_modified=self._on_panel_modified, + on_addresses_changed=self._on_panel_addresses_changed, nickname_lookup=self.shared_data.lookup_nickname, address_normalizer=self.shared_data.normalize_address, ) + # Sync Modbus column visibility before adding tab + if not self._modbus_toolbar_var.get(): + panel.set_modbus_columns_visible(False) + # Add tab self.notebook.add(panel, text=file_path.stem) self.notebook.select(panel) @@ -125,11 +462,16 @@ def _new_dataview(self) -> None: self.notebook, file_path=None, on_modified=self._on_panel_modified, + on_addresses_changed=self._on_panel_addresses_changed, nickname_lookup=self.shared_data.lookup_nickname, address_normalizer=self.shared_data.normalize_address, name=name, ) + # Sync Modbus column visibility before adding tab + if not self._modbus_toolbar_var.get(): + panel.set_modbus_columns_visible(False) + self.notebook.add(panel, text=name) self.notebook.select(panel) @@ -211,38 +553,27 @@ def _export(self) -> None: # Same as Save As for now self._save_as() + def _close_tab_at_index(self, tab_index: int) -> None: + """Close the tab at the given index. + + Args: + tab_index: Index of the tab to close + """ + try: + self.notebook._try_close_tab(tab_index) # pyright: ignore[reportAttributeAccessIssue] + except (AttributeError, tk.TclError, IndexError): + pass + def _close_current_tab(self) -> None: """Close the current tab.""" - panel = self._get_current_panel() - if not panel: - return - - # Check for unsaved changes - if panel.is_dirty: - result = messagebox.askyesnocancel( - "Unsaved Changes", - f"'{panel.name}' has unsaved changes. Save before closing?", - parent=self, - ) - if result is None: # Cancel + try: + tab_id = self.notebook.select() + if not tab_id: return - if result: # Yes - save - if panel.file_path: - panel.save() - else: - self._save_as() - if panel.is_dirty: # User cancelled save dialog - return - - # Remove from tracking - if panel.file_path in self._open_panels: - del self._open_panels[panel.file_path] - elif None in self._open_panels and self._open_panels[None] == panel: - del self._open_panels[None] - - # Close tab - self.notebook.forget(panel) - panel.destroy() + tab_index = self.notebook.index(tab_id) + except tk.TclError: + return + self._close_tab_at_index(tab_index) def _clear_selected_rows(self) -> None: """Clear selected rows in the current panel.""" @@ -278,6 +609,14 @@ def _on_close(self) -> None: self._nav_window.destroy() self._nav_window = None + # Detach Modbus service and clean up on a daemon thread to avoid + # deadlocking the Tcl interpreter (service callbacks use self.after). 
+ service = self._modbus + self._modbus = None + self._clear_live_values_all_panels() + if service is not None: + self._run_background(self._disconnect_modbus_service, service) + # Unregister from shared data self.shared_data.unregister_window(self) @@ -323,6 +662,19 @@ def _on_outline_select(self, path: str, leaves: list[tuple[str, int]]) -> None: """ self._insert_addresses(leaves) + @staticmethod + def _get_paired_prompt_type(leaves: list[tuple[str, int]]) -> tuple[str, str] | None: + """Return (source_type, paired_type) when paired insert prompt should be shown.""" + memory_types = {memory_type for memory_type, _address in leaves} + if len(memory_types) != 1: + return None + + source_type = next(iter(memory_types)) + if source_type not in ("T", "CT"): + return None + + return source_type, INTERLEAVED_PAIRS[source_type] + def _on_block_select(self, leaves: list[tuple[str, int]]) -> None: """Handle block selection from NavWindow - insert all block addresses. @@ -334,22 +686,16 @@ def _on_block_select(self, leaves: list[tuple[str, int]]) -> None: if not leaves: return - # Check if this is a T or CT block (all addresses same type for a block) - memory_types = {mem_type for mem_type, _ in leaves} - - # If it's a T or CT block, offer to include paired data type (TD/CTD) include_paired = False paired_type = None - for mem_type in memory_types: - # Only prompt for bit types (T, CT), not data types (TD, CTD) - if mem_type in ("T", "CT"): - paired_type = INTERLEAVED_PAIRS[mem_type] - include_paired = messagebox.askyesno( - "Include Paired Type", - f"Also insert {paired_type} addresses with this {mem_type} block?", - parent=self, - ) - break # Only one type per block + pair_prompt = self._get_paired_prompt_type(leaves) + if pair_prompt is not None: + source_type, paired_type = pair_prompt + include_paired = messagebox.askyesno( + "Include Paired Type", + f"Also insert {paired_type} addresses with this {source_type} block?", + parent=self, + ) # Build address list, interleaving if paired type requested if include_paired and paired_type: @@ -385,6 +731,27 @@ def _toggle_nav(self) -> None: self._nav_window._dock_to_parent() self._tag_browser_var.set(True) + def _toggle_modbus_toolbar(self) -> None: + """Toggle Modbus toolbar and column visibility.""" + visible = self._modbus_toolbar_var.get() + + if visible: + self.modbus_toolbar.pack(fill=tk.X, padx=5, pady=(2, 0), before=self.notebook) + else: + self.modbus_toolbar.pack_forget() + + # Show/hide Modbus columns on all open panels + for panel in self._iter_open_panels(): + panel.set_modbus_columns_visible(visible) + + # Widen window when showing Modbus columns, shrink when hiding + if visible: + w = max(self.winfo_width(), 1100) + self.geometry(f"{w}x{self.winfo_height()}") + else: + w = max(self.winfo_width() - 300, 800) + self.geometry(f"{w}x{self.winfo_height()}") + def _create_menu(self) -> None: """Create the menu bar.""" menubar = tk.Menu(self) @@ -434,6 +801,17 @@ def _create_menu(self) -> None: variable=self._tag_browser_var, command=self._toggle_nav, ) + view_menu.add_checkbutton( + label="Modbus Toolbar", + variable=self._modbus_toolbar_var, + command=self._toggle_modbus_toolbar, + ) + + # Connection menu (secondary entry points for toolbar connection actions) + self.connection_menu = tk.Menu(menubar, tearoff=0) + menubar.add_cascade(label="Connection", menu=self.connection_menu) + self.connection_menu.add_command(label="Connect", command=self._connect_modbus) + self.connection_menu.add_command(label="Disconnect", 
command=self._disconnect_modbus) def _open_selected(self) -> None: """Open the selected file from the list.""" @@ -451,7 +829,12 @@ def _on_file_double_click(self, event) -> None: def _on_tab_changed(self, event) -> None: """Handle tab change event.""" - pass # Could update window title, etc. + current_panel = self._get_current_panel() + if self._active_panel is not None and self._active_panel is not current_panel: + self._active_panel.clear_live_values() + + self._active_panel = current_panel + self._sync_poll_addresses_from_active_tab() def _on_tab_close_request(self, tab_index: int) -> bool: """Handle close button click on a tab. @@ -491,6 +874,8 @@ def _on_tab_close_request(self, tab_index: int) -> bool: elif None in self._open_panels and self._open_panels[None] == panel: del self._open_panels[None] + self._pending_closed_panel = panel + return True except (tk.TclError, IndexError): @@ -498,48 +883,16 @@ def _on_tab_close_request(self, tab_index: int) -> bool: def _on_tab_closed(self, event) -> None: """Handle tab closed event (after the tab is removed).""" - pass # Cleanup already done in _on_tab_close_request - - def _close_tab_at_index(self, tab_index: int) -> None: - """Close the tab at the given index. + if self._pending_closed_panel is not None: + self._pending_closed_panel.clear_live_values() + try: + self._pending_closed_panel.destroy() + except Exception: + pass + self._pending_closed_panel = None - Args: - tab_index: Index of the tab to close - """ - try: - tab_id = self.notebook.tabs()[tab_index] - panel = self.notebook.nametowidget(tab_id) - - # Check for unsaved changes - if panel.is_dirty: - result = messagebox.askyesnocancel( - "Unsaved Changes", - f"'{panel.name}' has unsaved changes. Save before closing?", - parent=self, - ) - if result is None: # Cancel - return - if result: # Yes - save - if panel.file_path: - panel.save() - else: - # Need to save as - self.notebook.select(tab_index) - self._save_as() - if panel.is_dirty: # User cancelled save dialog - return - - # Remove from tracking - if panel.file_path in self._open_panels: - del self._open_panels[panel.file_path] - elif None in self._open_panels and self._open_panels[None] == panel: - del self._open_panels[None] - - # Close tab - self.notebook.forget(panel) - panel.destroy() - except (tk.TclError, IndexError): - pass + self._active_panel = self._get_current_panel() + self._sync_poll_addresses_from_active_tab() def _on_tab_right_click(self, event) -> None: """Handle right-click on notebook tab - show close menu.""" @@ -696,6 +1049,54 @@ def _create_widgets(self) -> None: side=tk.LEFT, padx=(0, 5) ) + # Modbus toggle (right side of toolbar) + modbus_cb = ttk.Checkbutton( + toolbar, + text="⚡ Modbus", + variable=self._modbus_toolbar_var, + command=self._toggle_modbus_toolbar, + ) + modbus_cb.configure(style="Modbus.TCheckbutton") + ttk.Style().configure("Modbus.TCheckbutton", font=("TkDefaultFont", 9, "bold")) + modbus_cb.pack(side=tk.RIGHT, padx=(5, 0)) + + # Modbus toolbar (separate row, hidden by default) + self.modbus_toolbar = ttk.Frame(self.content) + + ttk.Label(self.modbus_toolbar, text="Host:").pack(side=tk.LEFT) + self.host_entry = ttk.Entry( + self.modbus_toolbar, textvariable=self._modbus_host_var, width=16 + ) + self.host_entry.pack(side=tk.LEFT, padx=(3, 6)) + + ttk.Label(self.modbus_toolbar, text="Port:").pack(side=tk.LEFT) + self.port_entry = ttk.Entry( + self.modbus_toolbar, textvariable=self._modbus_port_var, width=6 + ) + self.port_entry.pack(side=tk.LEFT, padx=(3, 6)) + + 
self.modbus_connect_button = ttk.Button( + self.modbus_toolbar, + textvariable=self._modbus_toggle_var, + command=self._toggle_modbus_connection, + width=10, + ) + self.modbus_connect_button.pack(side=tk.LEFT, padx=(0, 6)) + + self.write_checked_button = ttk.Button( + self.modbus_toolbar, + text="💾 Write", + command=self._write_checked, + ) + self.write_checked_button.pack(side=tk.LEFT, padx=(0, 4)) + + self.write_all_button = ttk.Button( + self.modbus_toolbar, + text="💾 Write All", + command=self._write_all, + ) + self.write_all_button.pack(side=tk.LEFT) + # Notebook for tabs (with close buttons) self.notebook = CustomNotebook(self.content, on_close_callback=self._on_tab_close_request) self.notebook.pack(fill=tk.BOTH, expand=True, pady=(5, 0)) @@ -760,6 +1161,18 @@ def __init__( # Navigation window self._nav_window: NavWindow | None = None + self._active_panel: DataviewPanel | None = None + self._pending_closed_panel: DataviewPanel | None = None + + # Modbus integration state + self._modbus: ModbusService | None = None + self._modbus_busy = False + self._modbus_write_busy = False + self._connection_state = ConnectionState.DISCONNECTED + self._modbus_host_var = tk.StringVar(value="127.0.0.1") + self._modbus_port_var = tk.StringVar(value="502") + self._modbus_toggle_var = tk.StringVar(value="Connect") + self._modbus_toolbar_var = tk.BooleanVar(value=False) # Configure window self._setup_window() @@ -781,6 +1194,8 @@ def __init__( # Open Tag Browser by default self.after(100, self._toggle_nav) + self._update_modbus_controls() + def refresh_nicknames_from_shared(self) -> None: """Called by SharedDataviewData when SharedAddressData changes. diff --git a/src/clicknick/views/nav_window/block_logic.py b/src/clicknick/views/nav_window/block_logic.py new file mode 100644 index 0000000..8a02780 --- /dev/null +++ b/src/clicknick/views/nav_window/block_logic.py @@ -0,0 +1,205 @@ +"""Tree-building helpers for the Blocks panel. + +Converts flat BlockRange results into a renderable tree model: +- Flat blocks remain top-level actionable rows. +- UDT-style names (Base.field) become Base parent nodes with field children. +""" + +from __future__ import annotations + +from collections.abc import Sequence +from dataclasses import dataclass + +from pyclickplc.blocks import BlockRange, group_udt_block_names, parse_structured_block_name + +from ...models.address_row import AddressRow + + +@dataclass(frozen=True) +class BlockTreeNode: + """Renderable block tree node for the Blocks panel.""" + + node_id: str + text: str + addresses: tuple[tuple[str, int], ...] + bg_color: str | None = None + children: tuple[BlockTreeNode, ...] = () + is_group: bool = False + start_idx: int = 0 + + +@dataclass(frozen=True) +class _BlockEntry: + """Internal normalized block entry used for grouping/sorting.""" + + idx: int + name: str + base: str + field: str | None + kind: str + start_idx: int + bg_color: str | None + addresses: tuple[tuple[str, int], ...] 
+ start_display: str + end_display: str + + +def _format_with_range(label: str, start: str, end: str) -> str: + """Format block text with single-point or range display.""" + if start == end: + return f"{label} ({start})" + return f"{label} ({start}-{end})" + + +def _dedupe_addresses( + addresses: list[tuple[str, int]] | tuple[tuple[str, int], ...], +) -> tuple[tuple[str, int], ...]: + """Deduplicate addresses preserving first occurrence order.""" + seen: set[tuple[str, int]] = set() + result: list[tuple[str, int]] = [] + for item in addresses: + if item in seen: + continue + seen.add(item) + result.append(item) + return tuple(result) + + +def _build_entries(ranges: list[BlockRange], rows: Sequence[AddressRow]) -> list[_BlockEntry]: + """Build normalized entries from matched block ranges.""" + entries: list[_BlockEntry] = [] + for idx, block in enumerate(ranges): + block_rows = rows[block.start_idx : block.end_idx + 1] + if not block_rows: + continue + + parsed = parse_structured_block_name(block.name) + field = parsed.field if parsed.kind == "udt" else None + addresses = tuple((row.memory_type, row.address) for row in block_rows) + + entries.append( + _BlockEntry( + idx=idx, + name=block.name, + base=parsed.base, + field=field, + kind=parsed.kind, + start_idx=block.start_idx, + bg_color=block.bg_color, + addresses=addresses, + start_display=block_rows[0].display_address, + end_display=block_rows[-1].display_address, + ) + ) + return entries + + +def _build_flat_node(entry: _BlockEntry) -> BlockTreeNode: + """Create a top-level actionable node for non-UDT names.""" + text = _format_with_range(entry.name, entry.start_display, entry.end_display) + return BlockTreeNode( + node_id=f"flat:{entry.idx}", + text=text, + addresses=entry.addresses, + bg_color=entry.bg_color, + start_idx=entry.start_idx, + ) + + +def _build_udt_node(base: str, entries: list[_BlockEntry], *, sort_alphabetically: bool) -> BlockTreeNode: + """Create one UDT parent node and its field children.""" + field_order_map = group_udt_block_names(entry.name for entry in entries) + field_order = list(field_order_map.get(base, ())) + + by_field: dict[str, list[_BlockEntry]] = {} + fallback_field_order: list[str] = [] + for entry in entries: + if entry.field is None: + continue + by_field.setdefault(entry.field, []).append(entry) + if entry.field not in fallback_field_order: + fallback_field_order.append(entry.field) + + if sort_alphabetically: + ordered_fields = sorted(by_field.keys(), key=str.lower) + else: + seen = set(field_order) + ordered_fields = field_order + [field for field in fallback_field_order if field not in seen] + + children: list[BlockTreeNode] = [] + for field in ordered_fields: + field_entries = by_field.get(field, []) + if sort_alphabetically: + field_entries = sorted(field_entries, key=lambda item: item.start_idx) + + for entry in field_entries: + child_text = _format_with_range(field, entry.start_display, entry.end_display) + children.append( + BlockTreeNode( + node_id=f"udt:{base}:{field}:{entry.start_idx}:{entry.idx}", + text=child_text, + addresses=entry.addresses, + bg_color=entry.bg_color, + start_idx=entry.start_idx, + ) + ) + + parent_addresses: list[tuple[str, int]] = [] + for child in children: + parent_addresses.extend(child.addresses) + + first_start_idx = min((entry.start_idx for entry in entries), default=0) + return BlockTreeNode( + node_id=f"udt:{base}", + text=base, + addresses=_dedupe_addresses(parent_addresses), + children=tuple(children), + is_group=True, + start_idx=first_start_idx, + ) + + 
+def build_block_tree( + ranges: list[BlockRange], + rows: Sequence[AddressRow], + *, + sort_alphabetically: bool, +) -> list[BlockTreeNode]: + """Build display tree nodes from block ranges and row data.""" + entries = _build_entries(ranges, rows) + + flat_entries: list[_BlockEntry] = [] + udt_entries_by_base: dict[str, list[_BlockEntry]] = {} + top_level_order: list[tuple[str, str | int]] = [] + seen_bases: set[str] = set() + + for entry in entries: + if entry.kind == "udt" and entry.field is not None: + udt_entries_by_base.setdefault(entry.base, []).append(entry) + if entry.base not in seen_bases: + top_level_order.append(("udt", entry.base)) + seen_bases.add(entry.base) + continue + + flat_entries.append(entry) + top_level_order.append(("flat", entry.idx)) + + flat_nodes = {entry.idx: _build_flat_node(entry) for entry in flat_entries} + udt_nodes = { + base: _build_udt_node(base, base_entries, sort_alphabetically=sort_alphabetically) + for base, base_entries in udt_entries_by_base.items() + } + + if sort_alphabetically: + nodes = list(udt_nodes.values()) + list(flat_nodes.values()) + return sorted(nodes, key=lambda node: (node.text.lower(), node.start_idx)) + + result: list[BlockTreeNode] = [] + for kind, key in top_level_order: + if kind == "udt": + node = udt_nodes.get(key) # type: ignore[arg-type] + else: + node = flat_nodes.get(key) # type: ignore[arg-type] + if node is not None: + result.append(node) + return result diff --git a/src/clicknick/views/nav_window/block_panel.py b/src/clicknick/views/nav_window/block_panel.py index 00e45aa..d9f1fcb 100644 --- a/src/clicknick/views/nav_window/block_panel.py +++ b/src/clicknick/views/nav_window/block_panel.py @@ -12,6 +12,7 @@ from ...models.address_row import AddressRow from ...services.block_service import compute_all_block_ranges from ...widgets.colors import get_block_color_hex +from .block_logic import BlockTreeNode, build_block_tree class BlockPanel(ttk.Frame): @@ -32,6 +33,22 @@ def _on_sort_changed(self) -> None: if self._all_rows_cache is not None: self.build_tree(self._all_rows_cache) + def _get_expanded_node_ids(self) -> set[str]: + """Get currently expanded node ids (for restoring tree state).""" + expanded: set[str] = set() + for iid, node in self._node_data.items(): + if node.children and self.tree.item(iid, "open"): + expanded.add(node.node_id) + return expanded + + def _restore_expanded_node_ids(self, expanded_node_ids: set[str]) -> None: + """Restore expanded state for matching nodes.""" + if not expanded_node_ids: + return + for iid, node in self._node_data.items(): + if node.children and node.node_id in expanded_node_ids: + self.tree.item(iid, open=True) + def _create_widgets(self) -> None: header = ttk.Label(self, text="Memory Blocks", font=("TkDefaultFont", 9, "bold")) header.pack(fill=tk.X, padx=5, pady=(5, 2)) @@ -82,17 +99,15 @@ def __init__( super().__init__(parent) self.on_select = on_select self._block_data: dict[str, list[tuple[str, int]]] = {} + self._node_data: dict[str, BlockTreeNode] = {} self._configured_colors: set[str] = set() self._sort_alphabetically = tk.BooleanVar(value=False) self._all_rows_cache: dict[int, AddressRow] | None = None self._create_widgets() - def _render_block(self, name: str, color_name: str | None, rows: list[AddressRow]) -> None: - """Insert a block summary row into the treeview.""" - if not rows: - return - + def _ensure_color_tag(self, color_name: str | None) -> str: + """Create/get a tree tag for a block background color.""" # Convert color name to hex hex_color = None if 
color_name: @@ -105,31 +120,42 @@ def _render_block(self, name: str, color_name: str | None, rows: list[AddressRow if tag_name not in self._configured_colors: self.tree.tag_configure(tag_name, background=hex_color) self._configured_colors.add(tag_name) + return tag_name + + @staticmethod + def _format_node_text(node: BlockTreeNode) -> str: + """Format node text for display in treeview.""" + if node.is_group: + return node.text + prefix = "📦" if len(node.addresses) > 1 else "■" + return f"{prefix} {node.text}" + + def _insert_node(self, node: BlockTreeNode, parent_iid: str = "") -> None: + """Insert node and children recursively into the treeview.""" + tag_name = self._ensure_color_tag(node.bg_color) + iid = self.tree.insert( + parent_iid, + tk.END, + text=self._format_node_text(node), + tags=(tag_name,), + ) + self._node_data[iid] = node - # Format like Sidebar (Name + Range) - start_row = rows[0] - end_row = rows[-1] - - # Single point vs Range - if len(rows) > 1: - prefix = "📦" - display_text = f"{name} ({start_row.display_address}-{end_row.display_address})" - else: - prefix = "■" - display_text = f"{name} ({start_row.display_address})" - - # Insert as a single item - iid = self.tree.insert("", tk.END, text=f"{prefix} {display_text}", tags=(tag_name,)) + if node.addresses: + self._block_data[iid] = list(node.addresses) - # Store all addresses in the block - self._block_data[iid] = [(row.memory_type, row.address) for row in rows] + for child in node.children: + self._insert_node(child, iid) def build_tree(self, all_rows: dict[int, AddressRow]) -> None: """Parse comments and rebuild the blocks tree.""" + expanded_node_ids = self._get_expanded_node_ids() + # Cache the data for re-sorting self._all_rows_cache = all_rows self._block_data.clear() + self._node_data.clear() for item in self.tree.get_children(): self.tree.delete(item) @@ -139,15 +165,15 @@ def build_tree(self, all_rows: dict[int, AddressRow]) -> None: # Use centralized block matching ranges = compute_all_block_ranges(rows_list) + nodes = build_block_tree( + ranges, + rows_list, + sort_alphabetically=self._sort_alphabetically.get(), + ) + for node in nodes: + self._insert_node(node) - # Sort alphabetically if requested - if self._sort_alphabetically.get(): - ranges = sorted(ranges, key=lambda block: block.name.lower()) - - # Render each block - for block in ranges: - block_rows = rows_list[block.start_idx : block.end_idx + 1] - self._render_block(block.name, block.bg_color, block_rows) + self._restore_expanded_node_ids(expanded_node_ids) def refresh(self, all_rows: dict[int, AddressRow]) -> None: """Refresh the tree with updated data.""" diff --git a/src/clicknick/widgets/add_block_dialog.py b/src/clicknick/widgets/add_block_dialog.py index c49e5a8..439177a 100644 --- a/src/clicknick/widgets/add_block_dialog.py +++ b/src/clicknick/widgets/add_block_dialog.py @@ -1,5 +1,6 @@ import random import tkinter as tk +from collections.abc import Callable from tkinter import messagebox, ttk from .colors import BLOCK_COLOR_NAMES, BLOCK_COLORS @@ -35,6 +36,13 @@ def _on_ok(self) -> None: messagebox.showerror("Error", "Please enter a block name.", parent=self) return + # Check for duplicate name if validator provided + if self._validate_name is not None: + is_valid, error_msg = self._validate_name(name) + if not is_valid: + messagebox.showerror("Duplicate Block Name", error_msg, parent=self) + return + self.result = (name, self._selected_color) self.destroy() @@ -108,7 +116,19 @@ def _create_widgets(self) -> None: ) ttk.Button(btn_frame, 
text="Cancel", command=self._on_cancel, width=10).pack(side=tk.RIGHT) - def __init__(self, parent: tk.Widget): + def __init__( + self, + parent: tk.Widget, + validate_name: Callable[[str], tuple[bool, str]] | None = None, + ): + """Initialize the Add Block dialog. + + Args: + parent: Parent widget + validate_name: Optional callback to validate block name. + Should return (is_valid, error_message). + If None, no validation is performed. + """ super().__init__(parent) self.title("Add Block") self.resizable(False, False) @@ -118,6 +138,7 @@ def __init__(self, parent: tk.Widget): self.result: tuple[str, str | None] | None = None # (name, color) or None if cancelled self._selected_color: str | None = None self._color_buttons: dict[str, tk.Button] = {} + self._validate_name = validate_name self._create_widgets() diff --git a/src/clicknick/widgets/import_csv_dialog.py b/src/clicknick/widgets/import_csv_dialog.py index d4bf32d..ee65fa0 100644 --- a/src/clicknick/widgets/import_csv_dialog.py +++ b/src/clicknick/widgets/import_csv_dialog.py @@ -7,12 +7,11 @@ from tkinter import filedialog, messagebox, ttk from typing import TYPE_CHECKING +from pyclickplc.banks import MEMORY_TYPE_BASES +from pyclickplc.blocks import BlockRange, compute_all_block_ranges from tksheet import Sheet, num2alpha from ..data.data_source import CsvDataSource -from ..models.blocktag import BlockRange -from ..models.constants import MEMORY_TYPE_BASES -from ..services.block_service import compute_all_block_ranges if TYPE_CHECKING: from ..models.address_row import AddressRow diff --git a/tests/test_address_model.py b/tests/test_address_model.py index ee3caee..1ea8601 100644 --- a/tests/test_address_model.py +++ b/tests/test_address_model.py @@ -1,8 +1,22 @@ """Unit tests for address_editor.address_model module.""" import pytest +from pyclickplc.addresses import ( + get_addr_key, + parse_addr_key, +) +from pyclickplc.banks import ( + BANKS, + DEFAULT_RETENTIVE, + MEMORY_TYPE_BASES, + MEMORY_TYPE_TO_DATA_TYPE, + NON_EDITABLE_TYPES, + PAIRED_RETENTIVE_TYPES, + DataType, +) +from pyclickplc.validation import COMMENT_MAX_LENGTH, FORBIDDEN_CHARS, NICKNAME_MAX_LENGTH -from clicknick.models.address_row import AddressRow, get_addr_key, parse_addr_key +from clicknick.models.address_row import AddressRow from clicknick.models.blocktag import ( BlockTag, extract_block_name, @@ -11,18 +25,6 @@ parse_block_tag, strip_block_tag, ) -from clicknick.models.constants import ( - ADDRESS_RANGES, - COMMENT_MAX_LENGTH, - DEFAULT_RETENTIVE, - FORBIDDEN_CHARS, - MEMORY_TYPE_BASES, - MEMORY_TYPE_TO_DATA_TYPE, - NICKNAME_MAX_LENGTH, - NON_EDITABLE_TYPES, - PAIRED_RETENTIVE_TYPES, - DataType, -) from clicknick.models.validation import ( validate_comment, validate_initial_value, @@ -103,11 +105,11 @@ def test_parse_addr_key_roundtrip(self): def test_xd_yd_display_functions(self): """Test XD/YD display address formatting and parsing.""" - from clicknick.models.address_row import ( + from pyclickplc.addresses import ( format_address_display, is_xd_yd_hidden_slot, is_xd_yd_upper_byte, - parse_address_display, + parse_address, xd_yd_display_to_mdb, xd_yd_mdb_to_display, ) @@ -157,19 +159,20 @@ def test_xd_yd_display_functions(self): assert format_address_display("YD", 1) == "YD0u" assert format_address_display("DS", 100) == "DS100" # Non-XD/YD unchanged - # Test parse_address_display - assert parse_address_display("XD0") == ("XD", 0) - assert parse_address_display("XD0u") == ("XD", 1) - assert parse_address_display("XD1") == ("XD", 2) - assert 
parse_address_display("XD2") == ("XD", 4) - assert parse_address_display("XD8") == ("XD", 16) - assert parse_address_display("YD0u") == ("YD", 1) - assert parse_address_display("YD8") == ("YD", 16) - assert parse_address_display("DS100") == ("DS", 100) - assert parse_address_display("X001") == ("X", 1) - assert parse_address_display("XD1u") is None # Invalid: only XD0 has u variant - assert parse_address_display("INVALID") is None - assert parse_address_display("") is None + # Test parse_address + assert parse_address("XD0") == ("XD", 0) + assert parse_address("XD0u") == ("XD", 1) + assert parse_address("XD1") == ("XD", 2) + assert parse_address("XD2") == ("XD", 4) + assert parse_address("XD8") == ("XD", 16) + assert parse_address("YD0u") == ("YD", 1) + assert parse_address("YD8") == ("YD", 16) + assert parse_address("DS100") == ("DS", 100) + assert parse_address("X001") == ("X", 1) + with pytest.raises(ValueError): + assert parse_address("XD1u") is None # Invalid: only XD0 has u variant + assert parse_address("INVALID") is None + assert parse_address("") is None def test_parse_addr_key_invalid_type_index(self): """Test that invalid type index raises KeyError.""" @@ -385,33 +388,33 @@ def test_replace_creates_new_row(self): class TestAddressRanges: - """Tests for ADDRESS_RANGES constant.""" + """Tests for BANKS constant.""" def test_all_types_have_ranges(self): - """All memory types should have defined ranges.""" - assert len(ADDRESS_RANGES) == 16 + """All memory types should have defined banks.""" + assert len(BANKS) == 16 for memory_type in MEMORY_TYPE_BASES: - assert memory_type in ADDRESS_RANGES + assert memory_type in BANKS def test_xd_yd_start_at_zero(self): """XD and YD should start at 0.""" - assert ADDRESS_RANGES["XD"][0] == 0 - assert ADDRESS_RANGES["YD"][0] == 0 + assert BANKS["XD"].min_addr == 0 + assert BANKS["YD"].min_addr == 0 def test_other_types_start_at_one(self): """Most types should start at 1.""" - for memory_type, (start, _) in ADDRESS_RANGES.items(): + for memory_type, bank in BANKS.items(): if memory_type not in ("XD", "YD"): - assert start == 1, f"{memory_type} should start at 1" + assert bank.min_addr == 1, f"{memory_type} should start at 1" def test_specific_ranges(self): """Test specific expected ranges.""" - assert ADDRESS_RANGES["X"] == (1, 816) - assert ADDRESS_RANGES["Y"] == (1, 816) - assert ADDRESS_RANGES["C"] == (1, 2000) - assert ADDRESS_RANGES["DS"] == (1, 4500) - assert ADDRESS_RANGES["T"] == (1, 500) - assert ADDRESS_RANGES["CT"] == (1, 250) + assert (BANKS["X"].min_addr, BANKS["X"].max_addr) == (1, 816) + assert (BANKS["Y"].min_addr, BANKS["Y"].max_addr) == (1, 816) + assert (BANKS["C"].min_addr, BANKS["C"].max_addr) == (1, 2000) + assert (BANKS["DS"].min_addr, BANKS["DS"].max_addr) == (1, 4500) + assert (BANKS["T"].min_addr, BANKS["T"].max_addr) == (1, 500) + assert (BANKS["CT"].min_addr, BANKS["CT"].max_addr) == (1, 250) class TestValidateInitialValue: @@ -926,3 +929,71 @@ def test_parse_block_tag_with_bg_attribute(self): "", "#FFCCBC", ) + + +class TestAddressRowDerivedProperties: + """Tests for AddressRow derived properties: outline_suffix, is_initial_value_masked, is_interleaved_secondary.""" + + # --- outline_suffix --- + + def test_outline_suffix_bit_default(self): + """BIT with default values shows just ': BIT'.""" + row = AddressRow(memory_type="X", address=1, data_type=DataType.BIT) + assert row.outline_suffix == " : BIT" + + def test_outline_suffix_bit_retentive(self): + """BIT with non-default retentive shows '= Retentive'.""" + row = 
AddressRow(memory_type="X", address=1, data_type=DataType.BIT, retentive=True) + assert row.outline_suffix == " : BIT = Retentive" + + def test_outline_suffix_bit_on(self): + """BIT with initial_value='1' shows '= ON'.""" + row = AddressRow(memory_type="X", address=1, data_type=DataType.BIT, initial_value="1") + assert row.outline_suffix == " : BIT = ON" + + def test_outline_suffix_int_with_value(self): + """INT with non-default retentive shows '= '.""" + # DS default retentive is True; set retentive=False for non-default + row = AddressRow( + memory_type="DS", address=1, data_type=DataType.INT, initial_value="42", retentive=False + ) + assert row.outline_suffix == " : INT = 42" + + # --- is_initial_value_masked --- + + def test_is_initial_value_masked_non_editable(self): + """NON_EDITABLE_TYPES are never masked (they just can't be edited).""" + row = AddressRow(memory_type="SC", address=1, data_type=DataType.BIT) + assert row.is_initial_value_masked(effective_retentive=True) is False + + def test_is_initial_value_masked_retentive_true(self): + """Editable type with effective_retentive=True is masked.""" + row = AddressRow(memory_type="DS", address=1, data_type=DataType.INT) + assert row.is_initial_value_masked(effective_retentive=True) is True + + def test_is_initial_value_masked_retentive_false(self): + """Editable type with effective_retentive=False is not masked.""" + row = AddressRow(memory_type="DS", address=1, data_type=DataType.INT) + assert row.is_initial_value_masked(effective_retentive=False) is False + + # --- is_interleaved_secondary --- + + def test_is_interleaved_secondary_td(self): + """TD is a secondary interleaved type.""" + row = AddressRow(memory_type="TD", address=1, data_type=DataType.INT) + assert row.is_interleaved_secondary is True + + def test_is_interleaved_secondary_ctd(self): + """CTD is a secondary interleaved type.""" + row = AddressRow(memory_type="CTD", address=1, data_type=DataType.INT2) + assert row.is_interleaved_secondary is True + + def test_is_interleaved_secondary_t(self): + """T is NOT a secondary interleaved type (it's primary).""" + row = AddressRow(memory_type="T", address=1, data_type=DataType.BIT) + assert row.is_interleaved_secondary is False + + def test_is_interleaved_secondary_ds(self): + """DS is NOT interleaved at all.""" + row = AddressRow(memory_type="DS", address=1, data_type=DataType.INT) + assert row.is_interleaved_secondary is False diff --git a/tests/test_address_store.py b/tests/test_address_store.py index dabe374..802968d 100644 --- a/tests/test_address_store.py +++ b/tests/test_address_store.py @@ -1,10 +1,11 @@ """Tests for AddressStore with base/overlay architecture.""" import pytest +from pyclickplc.addresses import get_addr_key from clicknick.data.address_store import AddressStore from clicknick.data.undo_frame import MAX_UNDO_DEPTH -from clicknick.models.address_row import get_addr_key +from clicknick.models.address_row import AddressRow class MockDataSource: @@ -37,8 +38,6 @@ def store(): def store_with_data(): """Create a store with pre-loaded data in base_state.""" - from clicknick.models.address_row import AddressRow - # Create initial rows that simulate being loaded from database addr_key_1 = get_addr_key("X", 1) addr_key_2 = get_addr_key("X", 2) @@ -370,6 +369,169 @@ def observer(sender, affected_keys): assert len(notifications) == 0 +class TestSystemNicknameValidation: + """Regression tests for system nickname validation behavior.""" + + @pytest.mark.parametrize( + ("memory_type", "nickname"), + ( + ("SC", 
"Comm/Port_1"), + ("SD", "_Fixed_Scan_Time(ms)"), + ("X", "_IO1_Module_Error"), + ), + ) + def test_loaded_system_nickname_is_valid(self, memory_type, nickname): + """System nicknames from DB should not be reported as invalid.""" + addr_key = get_addr_key(memory_type, 1) + data_source = MockDataSource( + { + addr_key: AddressRow( + memory_type=memory_type, + address=1, + nickname=nickname, + ) + } + ) + store = AddressStore(data_source) + store.load_initial_data() + + row = store.visible_state[addr_key] + assert row.nickname_valid is True + assert row.nickname_error == "" + assert row.is_valid is True + + @pytest.mark.parametrize( + ("memory_type", "nickname"), + ( + ("SC", "Comm/Port_1"), + ("SD", "_Fixed_Scan_Time(ms)"), + ), + ) + def test_user_entered_sc_sd_system_nickname_is_invalid(self, store, memory_type, nickname): + """SC/SD system-style nicknames should not be accepted as user edits.""" + addr_key = get_addr_key(memory_type, 1) + with store.edit_session("Set SC/SD system nickname") as session: + session.set_field(addr_key, "nickname", nickname) + + row = store.visible_state[addr_key] + assert row.nickname_valid is False + assert row.nickname_error != "" + assert row.is_valid is False + + @pytest.mark.parametrize( + ("memory_type", "base_nickname", "edited_nickname"), + ( + ("SC", "Comm/Port_1", "Comm/Port_2"), + ("SD", "_Fixed_Scan_Time(ms)", "_Scan_Time(ms)"), + ), + ) + def test_loaded_sc_sd_system_then_edited_is_invalid( + self, memory_type, base_nickname, edited_nickname + ): + """Loaded SC/SD system nickname is valid, but edited variant should fail.""" + addr_key = get_addr_key(memory_type, 1) + data_source = MockDataSource( + { + addr_key: AddressRow( + memory_type=memory_type, + address=1, + nickname=base_nickname, + ) + } + ) + store = AddressStore(data_source) + store.load_initial_data() + + with store.edit_session("Edit loaded SC/SD system nickname") as session: + session.set_field(addr_key, "nickname", edited_nickname) + + row = store.visible_state[addr_key] + assert row.nickname_valid is False + assert row.nickname_error != "" + assert row.is_valid is False + + def test_loaded_x_io_nickname_stays_valid(self): + """Loaded X _IO nickname should be allowed as PLC-generated.""" + addr_key = get_addr_key("X", 1) + data_source = MockDataSource( + { + addr_key: AddressRow( + memory_type="X", + address=1, + nickname="_IO1_Module_Error", + ) + } + ) + store = AddressStore(data_source) + store.load_initial_data() + + row = store.visible_state[addr_key] + assert row.nickname_valid is True + assert row.nickname_error == "" + assert row.is_valid is True + + def test_user_entered_x_io_nickname_is_invalid(self, store): + """User-entered X _IO nickname should be rejected.""" + addr_key = get_addr_key("X", 1) + with store.edit_session("Set X system nickname") as session: + session.set_field(addr_key, "nickname", "_IO1_Module_Error") + + row = store.visible_state[addr_key] + assert row.nickname_valid is False + assert "Cannot start with _" in row.nickname_error + assert row.is_valid is False + + def test_loaded_x_io_then_user_changes_nickname_is_invalid(self): + """Changing loaded _IO nickname to another _IO value should be rejected.""" + addr_key = get_addr_key("X", 1) + data_source = MockDataSource( + { + addr_key: AddressRow( + memory_type="X", + address=1, + nickname="_IO1_Module_Error", + ) + } + ) + store = AddressStore(data_source) + store.load_initial_data() + + with store.edit_session("Change loaded _IO nickname") as session: + session.set_field(addr_key, "nickname", 
"_IO2_Module_Error") + + row = store.visible_state[addr_key] + assert row.nickname_valid is False + assert "Cannot start with _" in row.nickname_error + assert row.is_valid is False + + def test_user_edit_clears_loaded_error_mask_for_nickname(self): + """Editing nickname should stop masking old loaded validation errors.""" + addr_key = get_addr_key("SC", 1) + data_source = MockDataSource( + { + addr_key: AddressRow( + memory_type="SC", + address=1, + nickname="log", # Reserved keyword + ) + } + ) + store = AddressStore(data_source) + store.load_initial_data() + + loaded_row = store.visible_state[addr_key] + assert loaded_row.loaded_with_error is True + assert loaded_row.nickname_valid is True + + with store.edit_session("Edit invalid loaded nickname") as session: + session.set_field(addr_key, "nickname", "_UserTyped") + + edited_row = store.visible_state[addr_key] + assert edited_row.loaded_with_error is False + assert edited_row.nickname_valid is False + assert "Cannot start with _" in edited_row.nickname_error + + class TestExternalDatabaseUpdate: """Tests for external database update handling.""" diff --git a/tests/test_app_csv_regression.py b/tests/test_app_csv_regression.py new file mode 100644 index 0000000..1103500 --- /dev/null +++ b/tests/test_app_csv_regression.py @@ -0,0 +1,109 @@ +"""Regression tests for app state transitions around CSV-only loading.""" + +from __future__ import annotations + +from types import SimpleNamespace +from unittest.mock import MagicMock + +from clicknick.app import ClickNickApp + + +class FakeVar: + """Minimal Tk-like variable for get/set in unit tests.""" + + def __init__(self, value: str = ""): + self._value = value + + def get(self) -> str: + return self._value + + def set(self, value: str) -> None: + self._value = value + + +class FakeCsvDataSource: + """Minimal CSV data source stand-in for app.load_csv tests.""" + + def __init__(self, csv_path: str): + self.csv_path = csv_path + + +class FakeAddressStore: + """Minimal AddressStore stand-in for app.load_csv tests.""" + + def __init__(self, data_source): + self.data_source = data_source + self.load_initial_data = MagicMock() + self.start_file_monitoring = MagicMock() + + +def _make_app_stub() -> ClickNickApp: + app = ClickNickApp.__new__(ClickNickApp) + app._shared_address_data = None + app._shared_data_source_path = None + app._update_status = MagicMock() + app._update_window_title = MagicMock() + app.start_monitoring = MagicMock() + app.stop_monitoring = MagicMock() + app.root = MagicMock() + app.monitoring = False + app.using_database = True + return app + + +def test_load_csv_with_stale_connection_skips_monitoring_and_clears_connection(monkeypatch): + monkeypatch.setattr("clicknick.data.data_source.CsvDataSource", FakeCsvDataSource) + monkeypatch.setattr("clicknick.app.AddressStore", FakeAddressStore) + + app = _make_app_stub() + app.csv_path_var = FakeVar("C:/tmp/NicknameExport.csv") + app.selected_instance_var = FakeVar("OldProject.ckp") + app.connected_click_pid = 1234 + app.connected_click_hwnd = 5678 + app.connected_click_filename = "OldProject.ckp" + app.detector = SimpleNamespace(check_window_exists=lambda _pid: False) + app.settings = SimpleNamespace(sort_by_nickname=False) + app.nickname_manager = SimpleNamespace( + set_shared_data=MagicMock(), + apply_sorting=MagicMock(), + ) + + result = app.load_csv() + + assert result is True + assert app.start_monitoring.call_count == 0 + assert app.connected_click_pid is None + assert app.connected_click_hwnd is None + assert 
app.connected_click_filename is None + assert app.selected_instance_var.get() == "" + assert app._shared_data_source_path == "C:/tmp/NicknameExport.csv" + app.nickname_manager.set_shared_data.assert_called_once_with(app._shared_address_data) + + +def test_handle_window_closed_preserves_csv_loaded_store(): + app = _make_app_stub() + csv_path = "C:/tmp/NicknameExport.csv" + store = SimpleNamespace(force_close_all_windows=MagicMock()) + + app._shared_data_source_path = csv_path + app._shared_address_data = store + app.nickname_manager = SimpleNamespace(set_shared_data=MagicMock()) + app.selected_instance_var = FakeVar("MyProject.ckp") + app.connected_click_pid = 111 + app.connected_click_hwnd = 222 + app.connected_click_filename = "MyProject.ckp" + app.refresh_click_instances = MagicMock() + app.root = SimpleNamespace(after=MagicMock()) + + app._handle_window_closed() + + app.stop_monitoring.assert_called_once_with(update_status=False) + assert app._shared_data_source_path == csv_path + assert app._shared_address_data is store + app.nickname_manager.set_shared_data.assert_not_called() + store.force_close_all_windows.assert_not_called() + assert app.connected_click_pid is None + assert app.connected_click_hwnd is None + assert app.connected_click_filename is None + assert app.selected_instance_var.get() == "" + app.root.after.assert_called_once_with(2000, app.refresh_click_instances) diff --git a/tests/test_block_logic.py b/tests/test_block_logic.py new file mode 100644 index 0000000..226b5f0 --- /dev/null +++ b/tests/test_block_logic.py @@ -0,0 +1,134 @@ +"""Tests for nav_window.block_logic tree building.""" + +from pyclickplc.blocks import BlockRange + +from clicknick.models.address_row import AddressRow +from clicknick.views.nav_window.block_logic import build_block_tree + + +def _make_rows(addresses: list[tuple[str, int]]) -> list[AddressRow]: + return [AddressRow(memory_type=memory_type, address=address) for memory_type, address in addresses] + + +class TestBuildBlockTree: + def test_non_udt_and_named_array_stay_flat(self): + rows = _make_rows([("X", 1), ("X", 2)]) + ranges = [ + BlockRange(0, 0, "Simple", None, "X"), + BlockRange(1, 1, "Base:named_array(2,3)", None, "X"), + ] + + nodes = build_block_tree(ranges, rows, sort_alphabetically=False) + + assert [node.text for node in nodes] == [ + f"Simple ({rows[0].display_address})", + f"Base:named_array(2,3) ({rows[1].display_address})", + ] + assert all(not node.children for node in nodes) + + def test_udt_names_group_under_base_parent(self): + rows = _make_rows([("X", 1), ("X", 2), ("X", 3)]) + ranges = [ + BlockRange(0, 0, "Alarm.id", None, "X"), + BlockRange(1, 1, "Alarm.On", None, "X"), + BlockRange(2, 2, "Pump", None, "X"), + ] + + nodes = build_block_tree(ranges, rows, sort_alphabetically=False) + + assert len(nodes) == 2 + alarm = nodes[0] + assert alarm.is_group + assert alarm.text == "Alarm" + assert [child.text for child in alarm.children] == [ + f"id ({rows[0].display_address})", + f"On ({rows[1].display_address})", + ] + assert alarm.addresses == (("X", 1), ("X", 2)) + + pump = nodes[1] + assert not pump.is_group + assert pump.text == f"Pump ({rows[2].display_address})" + + def test_unsorted_top_level_and_child_order_follow_first_occurrence(self): + rows = _make_rows([("X", 1), ("X", 2), ("X", 3), ("X", 4), ("X", 5)]) + ranges = [ + BlockRange(0, 0, "FlatA", None, "X"), + BlockRange(1, 1, "B.z", None, "X"), + BlockRange(2, 2, "FlatB", None, "X"), + BlockRange(3, 3, "B.a", None, "X"), + BlockRange(4, 4, "A.x", None, "X"), + ] + 
+ nodes = build_block_tree(ranges, rows, sort_alphabetically=False) + + assert [node.text for node in nodes] == [ + f"FlatA ({rows[0].display_address})", + "B", + f"FlatB ({rows[2].display_address})", + "A", + ] + b_children = [child.text for child in nodes[1].children] + assert b_children == [ + f"z ({rows[1].display_address})", + f"a ({rows[3].display_address})", + ] + + def test_sorted_mode_sorts_top_level_and_children(self): + rows = _make_rows([("X", 1), ("X", 2), ("X", 3), ("X", 4), ("X", 5)]) + ranges = [ + BlockRange(0, 0, "Beta.z", None, "X"), + BlockRange(1, 1, "Beta.a", None, "X"), + BlockRange(2, 2, "Alpha.x", None, "X"), + BlockRange(3, 3, "Zulu", None, "X"), + BlockRange(4, 4, "Echo", None, "X"), + ] + + nodes = build_block_tree(ranges, rows, sort_alphabetically=True) + + assert [node.text for node in nodes] == [ + "Alpha", + "Beta", + f"Echo ({rows[4].display_address})", + f"Zulu ({rows[3].display_address})", + ] + beta = nodes[1] + assert [child.text for child in beta.children] == [ + f"a ({rows[1].display_address})", + f"z ({rows[0].display_address})", + ] + + def test_duplicate_udt_fields_keep_individual_children(self): + rows = _make_rows([("X", 1), ("X", 2), ("X", 3)]) + ranges = [ + BlockRange(0, 0, "Alarm.id", None, "X"), + BlockRange(1, 1, "Alarm.id", None, "X"), + BlockRange(2, 2, "Alarm.On", None, "X"), + ] + + nodes = build_block_tree(ranges, rows, sort_alphabetically=False) + + assert len(nodes) == 1 + alarm = nodes[0] + assert [child.text for child in alarm.children] == [ + f"id ({rows[0].display_address})", + f"id ({rows[1].display_address})", + f"On ({rows[2].display_address})", + ] + assert alarm.children[0].addresses == (("X", 1),) + assert alarm.children[1].addresses == (("X", 2),) + + def test_parent_aggregation_dedupes_addresses_in_child_order(self): + rows = _make_rows([("X", 1), ("X", 2), ("X", 3)]) + ranges = [ + BlockRange(0, 1, "Alarm.id", None, "X"), + BlockRange(1, 2, "Alarm.On", None, "X"), + ] + + nodes = build_block_tree(ranges, rows, sort_alphabetically=False) + + assert len(nodes) == 1 + alarm = nodes[0] + assert alarm.addresses == (("X", 1), ("X", 2), ("X", 3)) + assert alarm.children[0].addresses == (("X", 1), ("X", 2)) + assert alarm.children[1].addresses == (("X", 2), ("X", 3)) diff --git a/tests/test_block_service.py b/tests/test_block_service.py index 17b70a9..4a05f2c 100644 --- a/tests/test_block_service.py +++ b/tests/test_block_service.py @@ -3,7 +3,11 @@ import pytest from clicknick.data.address_store import AddressStore -from clicknick.services.block_service import compute_all_block_ranges +from clicknick.services.block_service import ( + compute_all_block_ranges, + get_all_block_names, + is_block_name_available, +) from clicknick.views.address_editor.view_builder import build_unified_view @@ -200,3 +204,324 @@ def test_compute_block_colors_map(store): # Rows outside block should not be in map assert 6 not in color_map assert 7 not in color_map + + +# ============================================================================= +# Tests for get_all_block_names and is_block_name_available +# ============================================================================= + + +def test_get_all_block_names_empty(store): + """Test get_all_block_names returns empty set when no blocks.""" + view = store.get_unified_view() + names = get_all_block_names(view.rows[:10]) + assert names == set() + + +def test_get_all_block_names_finds_blocks(store): + """Test get_all_block_names finds all block names.""" + view = store.get_unified_view() + rows = 
view.rows[:20] + + # Add some blocks + with store.edit_session("Add blocks") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + session.set_field(rows[10].addr_key, "comment", "") + session.set_field(rows[15].addr_key, "comment", "") + + # Refresh view + view = store.get_unified_view() + names = get_all_block_names(view.rows[:20]) + + assert names == {"BlockA", "BlockB"} + + +def test_get_all_block_names_includes_self_closing(store): + """Test get_all_block_names includes self-closing blocks.""" + view = store.get_unified_view() + rows = view.rows[:10] + + with store.edit_session("Add self-closing") as session: + session.set_field(rows[3].addr_key, "comment", "") + + view = store.get_unified_view() + names = get_all_block_names(view.rows[:10]) + + assert "SingleBlock" in names + + +def test_is_block_name_available_true(store): + """Test is_block_name_available returns True when name is not in use.""" + view = store.get_unified_view() + assert is_block_name_available("NewBlock", view.rows[:10]) + + +def test_is_block_name_available_false(store): + """Test is_block_name_available returns False when name exists.""" + view = store.get_unified_view() + rows = view.rows[:10] + + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + + view = store.get_unified_view() + assert not is_block_name_available("ExistingBlock", view.rows[:10]) + + +def test_is_block_name_available_with_exclusion(store): + """Test is_block_name_available respects exclude_addr_keys.""" + view = store.get_unified_view() + rows = view.rows[:10] + + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + + view = store.get_unified_view() + rows = view.rows[:10] + + # Without exclusion, name is not available + assert not is_block_name_available("MyBlock", rows) + + # With exclusion of the rows containing the tags, name IS available + # (useful when renaming - exclude the rows being renamed) + exclude = {rows[0].addr_key, rows[5].addr_key} + assert is_block_name_available("MyBlock", rows, exclude_addr_keys=exclude) + + +def test_is_block_name_available_case_sensitive(store): + """Test that block name check is case-sensitive.""" + view = store.get_unified_view() + rows = view.rows[:10] + + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + + view = store.get_unified_view() + + # Exact case match should NOT be available + assert not is_block_name_available("MyBlock", view.rows[:10]) + + # Different case should be available (case-sensitive) + assert is_block_name_available("myblock", view.rows[:10]) + assert is_block_name_available("MYBLOCK", view.rows[:10]) + + +# ============================================================================= +# Tests for duplicate block name validation in AddressStore +# ============================================================================= + + +def test_duplicate_block_name_validation_marks_row_invalid(store): + """Test that creating duplicate block names marks row as invalid.""" + view = store.get_unified_view() + rows = view.rows[:20] + + # Create first block + with store.edit_session("Add first block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + + # Try to create second block with same name + 
with store.edit_session("Add duplicate block") as session: + session.set_field(rows[10].addr_key, "comment", "") + + # Check the duplicate row is marked invalid + row10 = store.visible_state[rows[10].addr_key] + assert not row10.comment_valid + assert "Duplicate block" in row10.comment_error + + +def test_duplicate_block_name_validation_allows_same_block(store): + """Test that a block's own opening/closing tags don't trigger duplicate error.""" + view = store.get_unified_view() + rows = view.rows[:10] + + # Create a block + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + + # Both tags should be valid (they're the same block, not duplicates) + row0 = store.visible_state[rows[0].addr_key] + + # Note: The opening and closing tags of the same block should be valid + # because they ARE the same block - but with current logic they will + # show as duplicates of each other. This is expected behavior for Phase 0a. + # The simplification assumes unique names globally, so paired open/close + # tags are allowed to have the same name. + assert row0.comment_valid + # The closing tag will see the opening tag as duplicate - this is expected + # In a proper block, both use the same name and that's valid + # The duplicate check excludes the current row, so the opening tag is valid + # But the closing tag sees the opening tag as existing + + +def test_is_duplicate_block_name_method(store): + """Test the is_duplicate_block_name method directly.""" + view = store.get_unified_view() + rows = view.rows[:10] + + # Initially, no duplicates + assert not store.is_duplicate_block_name("NewBlock", rows[0].addr_key) + + # Create a block + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + + # Now "ExistingBlock" is a duplicate for other rows + assert store.is_duplicate_block_name("ExistingBlock", rows[5].addr_key) + + # But not for the row that has it (excluded) + assert not store.is_duplicate_block_name("ExistingBlock", rows[0].addr_key) + + +def test_closing_tag_not_marked_as_duplicate(store): + """Test that closing tags are NOT marked as duplicates of their opening tag.""" + view = store.get_unified_view() + rows = view.rows[:10] + + # Create a block with opening and closing tags + with store.edit_session("Add block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[5].addr_key, "comment", "") + + # Both opening and closing tags should be valid (not duplicates) + row0 = store.visible_state[rows[0].addr_key] + row5 = store.visible_state[rows[5].addr_key] + + assert row0.comment_valid, f"Opening tag should be valid, got error: {row0.comment_error}" + assert row5.comment_valid, f"Closing tag should be valid, got error: {row5.comment_error}" + + +def test_multiple_blocks_same_name_marked_duplicate(store): + """Test that multiple opening tags with same name ARE marked as duplicates.""" + view = store.get_unified_view() + rows = view.rows[:20] + + # Create two separate blocks with the same name + with store.edit_session("Add first block") as session: + session.set_field(rows[0].addr_key, "comment", "") + session.set_field(rows[3].addr_key, "comment", "") + + # Add second block with same name + with store.edit_session("Add second block") as session: + session.set_field(rows[10].addr_key, "comment", "") + session.set_field(rows[15].addr_key, "comment", "") + + # First block should be valid + row0 = 
store.visible_state[rows[0].addr_key] + row3 = store.visible_state[rows[3].addr_key] + assert row0.comment_valid, "First opening tag should be valid" + assert row3.comment_valid, "First closing tag should be valid" + + # Second block's OPENING tag should be marked as duplicate + row10 = store.visible_state[rows[10].addr_key] + assert not row10.comment_valid, "Second opening tag should be invalid (duplicate)" + assert "Duplicate" in row10.comment_error + + # Second block's closing tag should still be valid (closing tags aren't duplicates) + row15 = store.visible_state[rows[15].addr_key] + assert row15.comment_valid, "Second closing tag should be valid" + + +# ============================================================================= +# Tests for _D suffix transformation (Phase 0b) +# ============================================================================= + + +def test_transform_block_name_t_to_td(): + """Test T → TD adds _D suffix.""" + from clicknick.services.block_service import _transform_block_name_for_pair + + # Base name gets _D suffix + assert _transform_block_name_for_pair("Pumps", "T", "TD") == "Pumps_D" + assert _transform_block_name_for_pair("Motors", "CT", "CTD") == "Motors_D" + + # Already has _D suffix - keep it + assert _transform_block_name_for_pair("Pumps_D", "T", "TD") == "Pumps_D" + + +def test_transform_block_name_td_to_t(): + """Test TD → T removes _D suffix.""" + from clicknick.services.block_service import _transform_block_name_for_pair + + # Remove _D suffix + assert _transform_block_name_for_pair("Pumps_D", "TD", "T") == "Pumps" + assert _transform_block_name_for_pair("Motors_D", "CTD", "CT") == "Motors" + + # No _D suffix - keep as is + assert _transform_block_name_for_pair("Pumps", "TD", "T") == "Pumps" + + +def test_interleaved_pair_sync_adds_suffix(store): + """Test that T → TD block sync adds _D suffix.""" + # Find T1 and TD1 rows + from pyclickplc.addresses import get_addr_key + + t1_key = get_addr_key("T", 1) + td1_key = get_addr_key("TD", 1) + + # Create a block on T1 + with store.edit_session("Add block on T") as session: + session.set_field(t1_key, "comment", "") + + # TD1 should have the block tag with _D suffix + td1_row = store.visible_state[td1_key] + assert "" in td1_row.comment + + +def test_interleaved_pair_sync_removes_suffix(store): + """Test that TD → T block sync removes _D suffix.""" + from pyclickplc.addresses import get_addr_key + + t1_key = get_addr_key("T", 1) + td1_key = get_addr_key("TD", 1) + + # Create a block on TD1 with _D suffix + with store.edit_session("Add block on TD") as session: + session.set_field(td1_key, "comment", "") + + # T1 should have the block tag without _D suffix + t1_row = store.visible_state[t1_key] + assert "" in t1_row.comment + + +def test_interleaved_pair_sync_closing_tag_with_suffix(store): + """Test that closing tags also get _D suffix transformation.""" + from pyclickplc.addresses import get_addr_key + + t1_key = get_addr_key("T", 1) + t5_key = get_addr_key("T", 5) + td1_key = get_addr_key("TD", 1) + td5_key = get_addr_key("TD", 5) + + # Create a block on T1-T5 + with store.edit_session("Add block on T") as session: + session.set_field(t1_key, "comment", "") + session.set_field(t5_key, "comment", "") + + # TD should have corresponding tags with _D suffix + td1_row = store.visible_state[td1_key] + td5_row = store.visible_state[td5_key] + + assert "" in td5_row.comment # Closing tag + + +def test_interleaved_pair_sync_ct_ctd(store): + """Test that CT → CTD block sync works like T → TD.""" + from 
pyclickplc.addresses import get_addr_key + + ct1_key = get_addr_key("CT", 1) + ctd1_key = get_addr_key("CTD", 1) + + # Create a block on CT1 + with store.edit_session("Add block on CT") as session: + session.set_field(ct1_key, "comment", "") + + # CTD1 should have the block tag with _D suffix + ctd1_row = store.visible_state[ctd1_key] + assert "" in ctd1_row.comment diff --git a/tests/test_cdv_file.py b/tests/test_cdv_file.py index 67d3f4a..ac4b185 100644 --- a/tests/test_cdv_file.py +++ b/tests/test_cdv_file.py @@ -1,303 +1,60 @@ -"""Tests for CDV file I/O.""" - -import tempfile -from pathlib import Path - -import pytest - -from clicknick.models.dataview_row import ( - MAX_DATAVIEW_ROWS, - DataviewRow, - TypeCode, - create_empty_dataview, +"""Tests for ClickNick CDV file helper wrappers.""" + +from clicknick.models.dataview_row import DataType, create_empty_dataview +from clicknick.views.dataview_editor.cdv_file import ( + check_cdv_files, + get_dataview_folder, + list_cdv_files, + save_cdv, ) -from clicknick.views.dataview_editor.cdv_file import export_cdv, load_cdv, save_cdv - -# Test files in the tests directory -TEST_DIR = Path(__file__).parent -DATAVIEW1_PATH = TEST_DIR / "DataView1.cdv" -DATAVIEW1_WITH_NEW_VALUES_PATH = TEST_DIR / "DataView1WithNewValues.cdv" - - -class TestLoadCdv: - """Tests for loading CDV files.""" - - def test_load_dataview1(self): - """Test loading DataView1.cdv (no new values).""" - rows, has_new_values, header = load_cdv(DATAVIEW1_PATH) - - assert len(rows) == MAX_DATAVIEW_ROWS - assert has_new_values is False - assert header == "0,0,0" # Original header preserved - - # Check first few rows - assert rows[0].address == "X001" - assert rows[0].type_code == TypeCode.BIT - - assert rows[1].address == "X002" - assert rows[1].type_code == TypeCode.BIT - - # Check a DS row - ds_row = next(r for r in rows if r.address == "DS1") - assert ds_row.type_code == TypeCode.INT - - # Check a DF row - df_row = next(r for r in rows if r.address == "DF1") - assert df_row.type_code == TypeCode.FLOAT - - # Check a TXT row - txt_row = next(r for r in rows if r.address == "TXT1") - assert txt_row.type_code == TypeCode.TXT - - # Check empty rows exist - empty_count = sum(1 for r in rows if r.is_empty) - assert empty_count > 0 - - def test_load_dataview1_with_new_values(self): - """Test loading DataView1WithNewValues.cdv.""" - rows, has_new_values, header = load_cdv(DATAVIEW1_WITH_NEW_VALUES_PATH) - - assert len(rows) == MAX_DATAVIEW_ROWS - assert has_new_values is True - assert header == "-1,0,0" # Original header preserved - - # Check rows with new values - assert rows[0].address == "X001" - assert rows[0].new_value == "1" # BIT on - - assert rows[1].address == "X002" - assert rows[1].new_value == "0" # BIT off - - # DS with new value - ds1_row = next(r for r in rows if r.address == "DS1") - assert ds1_row.new_value == "1" - - # DF with new value (float stored as int representation) - df1_row = next(r for r in rows if r.address == "DF1") - assert df1_row.new_value # Should have a value - - def test_load_nonexistent_file(self): - """Test loading a file that doesn't exist.""" - with pytest.raises(FileNotFoundError): - load_cdv(TEST_DIR / "nonexistent.cdv") - - -class TestSaveCdv: - """Tests for saving CDV files.""" - - def test_save_empty_dataview(self): - """Test saving an empty dataview.""" - rows = create_empty_dataview() - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, rows, has_new_values=False) - - 
# Load it back - loaded_rows, has_new_values, _header = load_cdv(temp_path) - - assert len(loaded_rows) == MAX_DATAVIEW_ROWS - assert has_new_values is False - assert all(r.is_empty for r in loaded_rows) - finally: - temp_path.unlink(missing_ok=True) - - def test_save_with_addresses(self): - """Test saving a dataview with addresses.""" - rows = create_empty_dataview() - rows[0].address = "X001" - rows[0].type_code = TypeCode.BIT - rows[1].address = "DS100" - rows[1].type_code = TypeCode.INT - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, rows, has_new_values=False) - - # Load it back - loaded_rows, has_new_values, _header = load_cdv(temp_path) - - assert loaded_rows[0].address == "X001" - assert loaded_rows[0].type_code == TypeCode.BIT - assert loaded_rows[1].address == "DS100" - assert loaded_rows[1].type_code == TypeCode.INT - assert has_new_values is False - finally: - temp_path.unlink(missing_ok=True) - - def test_save_with_new_values(self): - """Test saving a dataview with new values.""" - rows = create_empty_dataview() - rows[0].address = "X001" - rows[0].type_code = TypeCode.BIT - rows[0].new_value = "1" - rows[1].address = "DS100" - rows[1].type_code = TypeCode.INT - rows[1].new_value = "42" - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, rows, has_new_values=True) - - # Load it back - loaded_rows, has_new_values, _header = load_cdv(temp_path) - - assert loaded_rows[0].address == "X001" - assert loaded_rows[0].new_value == "1" - assert loaded_rows[1].address == "DS100" - assert loaded_rows[1].new_value == "42" - assert has_new_values is True - finally: - temp_path.unlink(missing_ok=True) - - def test_fewer_rows_pads_to_100(self): - """Test that saving with fewer rows pads to 100.""" - rows = [DataviewRow(address="X001")] # Only 1 row - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - # Should not raise - pads with empty rows - save_cdv(temp_path, rows, has_new_values=False) - - # Verify file was saved and has correct structure - loaded_rows, _, _header = load_cdv(temp_path) - assert len(loaded_rows) == 100 - assert loaded_rows[0].address == "X001" - assert all(r.is_empty for r in loaded_rows[1:]) - finally: - temp_path.unlink(missing_ok=True) - - def test_overflow_rows_not_saved(self): - """Test that overflow rows (>100) are not saved.""" - rows = create_empty_dataview() - rows[0].address = "X001" - - # Add 5 overflow rows - for i in range(5): - overflow_row = DataviewRow(address=f"Y{i:03d}") - rows.append(overflow_row) - - assert len(rows) == 105 - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, rows, has_new_values=False) - - # Verify only first 100 rows saved - loaded_rows, _, _header = load_cdv(temp_path) - assert len(loaded_rows) == 100 - assert loaded_rows[0].address == "X001" - # Y000-Y004 should NOT be in loaded rows (they were overflow) - addresses = [r.address for r in loaded_rows if r.address] - assert "Y000" not in addresses - finally: - temp_path.unlink(missing_ok=True) - - -class TestRoundTrip: - """Tests for round-trip loading and saving.""" - - def test_roundtrip_dataview1(self): - """Test round-trip load/save of DataView1.cdv.""" - original_rows, original_has_new_values, original_header = load_cdv(DATAVIEW1_PATH) - - with tempfile.NamedTemporaryFile(suffix=".cdv", 
delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, original_rows, original_has_new_values, original_header) - loaded_rows, loaded_has_new_values, loaded_header = load_cdv(temp_path) - - assert loaded_has_new_values == original_has_new_values - assert loaded_header == original_header - - for i, (orig, loaded) in enumerate(zip(original_rows, loaded_rows, strict=True)): - assert orig.address == loaded.address, f"Row {i} address mismatch" - assert orig.type_code == loaded.type_code, f"Row {i} type_code mismatch" - assert orig.new_value == loaded.new_value, f"Row {i} new_value mismatch" - finally: - temp_path.unlink(missing_ok=True) - - def test_roundtrip_dataview1_with_new_values(self): - """Test round-trip load/save of DataView1WithNewValues.cdv.""" - original_rows, original_has_new_values, original_header = load_cdv( - DATAVIEW1_WITH_NEW_VALUES_PATH - ) - - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) - - try: - save_cdv(temp_path, original_rows, original_has_new_values, original_header) - loaded_rows, loaded_has_new_values, loaded_header = load_cdv(temp_path) - - assert loaded_has_new_values == original_has_new_values - assert loaded_header == original_header - - for i, (orig, loaded) in enumerate(zip(original_rows, loaded_rows, strict=True)): - assert orig.address == loaded.address, f"Row {i} address mismatch" - assert orig.type_code == loaded.type_code, f"Row {i} type_code mismatch" - assert orig.new_value == loaded.new_value, f"Row {i} new_value mismatch" - finally: - temp_path.unlink(missing_ok=True) -class TestExtendedHeader: - """Tests for CDV files with extended headers (column widths).""" +class TestDataviewFolderDiscovery: + """Tests for DataView folder discovery helpers.""" - def test_extended_header_preserved(self): - """Test that extended headers with column widths are preserved on round-trip.""" - extended_header = "0,0,0,559,653,94,138,94,94,94,75,50,75,75,30,78,64" - rows = create_empty_dataview() - rows[0].address = "X001" - rows[0].type_code = TypeCode.BIT + def test_get_dataview_folder_invalid_path(self, tmp_path): + assert get_dataview_folder(tmp_path / "missing") is None - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f: - temp_path = Path(f.name) + def test_get_dataview_folder_found(self, tmp_path): + project = tmp_path / "ProjectA" + dataview = project / "CLICK (00010A98)" / "DataView" + dataview.mkdir(parents=True) - try: - save_cdv(temp_path, rows, has_new_values=False, header=extended_header) - loaded_rows, has_new_values, loaded_header = load_cdv(temp_path) + found = get_dataview_folder(project) + assert found == dataview - assert loaded_header == extended_header - assert has_new_values is False - assert loaded_rows[0].address == "X001" - finally: - temp_path.unlink(missing_ok=True) + def test_list_cdv_files_sorted_and_filtered(self, tmp_path): + folder = tmp_path / "DataView" + folder.mkdir() + (folder / "zeta.cdv").write_text("", encoding="utf-8") + (folder / "Alpha.cdv").write_text("", encoding="utf-8") + (folder / "notes.txt").write_text("", encoding="utf-8") -class TestExportCdv: - """Tests for exporting CDV files.""" + files = list_cdv_files(folder) + assert [f.name for f in files] == ["Alpha.cdv", "zeta.cdv"] - def test_export_is_same_as_save(self): - """Test that export produces the same result as save.""" - rows = create_empty_dataview() - rows[0].address = "Y001" - rows[0].type_code = TypeCode.BIT + def test_check_cdv_files_counts_and_aggregation(self, 
tmp_path): + project = tmp_path / "MyProject" + dataview_dir = project / "CLICK (00010A98)" / "DataView" + dataview_dir.mkdir(parents=True) - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f1: - save_path = Path(f1.name) - with tempfile.NamedTemporaryFile(suffix=".cdv", delete=False) as f2: - export_path = Path(f2.name) + rows_ok = create_empty_dataview() + rows_ok[0].address = "X001" + rows_ok[0].data_type = DataType.BIT + save_cdv(dataview_dir / "ok.cdv", rows_ok, has_new_values=False) - try: - save_cdv(save_path, rows, has_new_values=False) - export_cdv(export_path, rows, has_new_values=False) + rows_bad = create_empty_dataview() + rows_bad[0].address = "DS1" + rows_bad[0].data_type = DataType.BIT + save_cdv(dataview_dir / "bad.cdv", rows_bad, has_new_values=False) - save_content = save_path.read_bytes() - export_content = export_path.read_bytes() + issues, checked = check_cdv_files(project) + assert checked == 2 + assert len(issues) == 1 + assert "Data type mismatch" in issues[0] - assert save_content == export_content - finally: - save_path.unlink(missing_ok=True) - export_path.unlink(missing_ok=True) + def test_check_cdv_files_missing_folder(self, tmp_path): + issues, checked = check_cdv_files(tmp_path / "NoProject") + assert checked == 0 + assert issues == [] diff --git a/tests/test_data_source_mdb_csv.py b/tests/test_data_source_mdb_csv.py new file mode 100644 index 0000000..d87b42c --- /dev/null +++ b/tests/test_data_source_mdb_csv.py @@ -0,0 +1,58 @@ +"""Tests for read_mdb_csv in clicknick.data.data_source.""" + +from clicknick.data.data_source import read_mdb_csv + + +class TestReadMdbCsv: + def test_skips_empty_nickname_and_comment(self, tmp_path): + csv_path = tmp_path / "Address.csv" + csv_path.write_text( + "AddrKey,MemoryType,Address,DataType,Nickname,Use,InitialValue,Retentive,Comment\n" + "1,X,1,0,,,1,0,\n" + "2,X,2,0,Input2,1,0,0,Second input\n", + encoding="utf-8", + ) + + rows = read_mdb_csv(str(csv_path)) + assert len(rows) == 1 + only = next(iter(rows.values())) + assert only.memory_type == "X" + assert only.address == 2 + assert only.nickname == "Input2" + assert only.comment == "Second input" + + def test_invalid_rows_are_ignored(self, tmp_path): + csv_path = tmp_path / "Address.csv" + csv_path.write_text( + "AddrKey,MemoryType,Address,DataType,Nickname,Use,InitialValue,Retentive,Comment\n" + "1,ZZ,1,0,BadType,1,0,0,Bad\n" + "2,X,NotInt,0,BadAddr,1,0,0,Bad\n" + "3,DS,1,1,Good,1,123,1,Good row\n", + encoding="utf-8", + ) + + rows = read_mdb_csv(str(csv_path)) + assert len(rows) == 1 + row = next(iter(rows.values())) + assert row.memory_type == "DS" + assert row.address == 1 + assert row.initial_value == "123" + + def test_returns_addressrow_map_with_expected_fields(self, tmp_path): + csv_path = tmp_path / "Address.csv" + csv_path.write_text( + "AddrKey,MemoryType,Address,DataType,Nickname,Use,InitialValue,Retentive,Comment\n" + "100663297,DS,1,1,Temp,1,100,1,Temperature\n", + encoding="utf-8", + ) + + rows = read_mdb_csv(str(csv_path)) + assert len(rows) == 1 + + row = next(iter(rows.values())) + assert row.memory_type == "DS" + assert row.address == 1 + assert row.data_type == 1 + assert row.nickname == "Temp" + assert row.comment == "Temperature" + assert row.retentive is True diff --git a/tests/test_dataview_block_select.py b/tests/test_dataview_block_select.py new file mode 100644 index 0000000..8c101bb --- /dev/null +++ b/tests/test_dataview_block_select.py @@ -0,0 +1,20 @@ +"""Tests for Dataview block-select pairing guard behavior.""" + +from 
clicknick.views.dataview_editor.window import DataviewEditorWindow + + +class TestDataviewPairedPromptType: + def test_single_type_t_prompts_for_td(self): + result = DataviewEditorWindow._get_paired_prompt_type([("T", 1), ("T", 2)]) + assert result == ("T", "TD") + + def test_single_type_ct_prompts_for_ctd(self): + result = DataviewEditorWindow._get_paired_prompt_type([("CT", 1), ("CT", 2)]) + assert result == ("CT", "CTD") + + def test_mixed_types_do_not_prompt(self): + assert DataviewEditorWindow._get_paired_prompt_type([("T", 1), ("TD", 1)]) is None + assert DataviewEditorWindow._get_paired_prompt_type([("T", 1), ("X", 1)]) is None + + def test_non_timer_types_do_not_prompt(self): + assert DataviewEditorWindow._get_paired_prompt_type([("X", 1)]) is None diff --git a/tests/test_dataview_modbus_integration.py b/tests/test_dataview_modbus_integration.py new file mode 100644 index 0000000..6239156 --- /dev/null +++ b/tests/test_dataview_modbus_integration.py @@ -0,0 +1,475 @@ +"""Dataview Modbus integration tests (panel + window behavior).""" + +from __future__ import annotations + +from pathlib import Path +from types import SimpleNamespace +from unittest.mock import MagicMock + +from pyclickplc.addresses import normalize_address +from pyclickplc.dataview import DataViewFile, DataViewRecord + +from clicknick.views.dataview_editor.panel import ( + COL_NEW_VALUE, + DataviewPanel, +) +from clicknick.views.dataview_editor.window import ConnectionState, DataviewEditorWindow + + +class FakeVar: + def __init__(self, value=None): + self.value = value + + def get(self): + return self.value + + def set(self, value): + self.value = value + + +class FakeWidget: + def __init__(self): + self.last_config: dict[str, object] = {} + + def config(self, **kwargs): + self.last_config.update(kwargs) + + +class FakeMenu: + def __init__(self, *args, **kwargs): + self.commands: dict[str, object] = {} + self.entry_states: dict[str, str] = {} + + def add_cascade(self, label, menu): + self.commands[label] = menu + + def add_command(self, label, command=None, **kwargs): + self.commands[label] = command + + def add_separator(self): + return None + + def add_checkbutton(self, **kwargs): + return None + + def entryconfig(self, label, state): + self.entry_states[label] = state + + +class FakeSheet: + def __init__(self): + self.cells: dict[tuple[int, int], object] = {} + + def get_cell_data(self, row, col): + return self.cells.get((row, col), "") + + def set_cell_data(self, row, col, value): + self.cells[(row, col)] = value + + def create_checkbox(self, r, c, checked=False, text=""): + self.cells[(r, c)] = bool(checked) + + def delete_checkbox(self, row, col): + return None + + +class FakePanel: + def __init__( + self, + name: str, + poll_addresses: list[str] | None = None, + write_rows: list[tuple[str, object]] | None = None, + write_all_rows: list[tuple[str, object]] | None = None, + ): + self.name = name + self.file_path = Path(f"{name}.cdv") + self.is_dirty = False + self._poll_addresses = poll_addresses or [] + self._write_rows = write_rows or [] + self._write_all_rows = write_all_rows or [] + self.live_cleared = 0 + self.write_checks_cleared = 0 + self.last_live_values = None + self.destroyed = False + + def get_poll_addresses(self): + return list(self._poll_addresses) + + def clear_live_values(self): + self.live_cleared += 1 + + def update_live_values(self, values): + self.last_live_values = dict(values) + + def get_write_rows(self): + return list(self._write_rows) + + def get_write_all_rows(self): + return 
list(self._write_all_rows) + + def clear_write_checks(self): + self.write_checks_cleared += 1 + + def destroy(self): + self.destroyed = True + + +class FakeNotebook: + def __init__(self, panels: list[FakePanel]): + self._ids = [f"tab{i}" for i in range(len(panels))] + self._panel_map = dict(zip(self._ids, panels, strict=False)) + self._selected = self._ids[0] if self._ids else "" + + def tabs(self): + return list(self._ids) + + def nametowidget(self, tab_id): + return self._panel_map[tab_id] + + def select(self, value=None): + if value is None: + return self._selected + self._selected = value + return self._selected + + def index(self, arg): + if arg == "end": + return len(self._ids) + if arg in self._ids: + return self._ids.index(arg) + for i, tab_id in enumerate(self._ids): + if self._panel_map[tab_id] is arg: + return i + raise IndexError(arg) + + def set_current_index(self, idx: int): + self._selected = self._ids[idx] + + +class FakeModbusService: + def __init__(self): + self.connect_calls: list[tuple[str, int]] = [] + self.set_poll_calls: list[list[str]] = [] + self.clear_poll_calls = 0 + self.disconnect_calls = 0 + self.write_calls: list[list[tuple[str, object]]] = [] + self.write_result = [{"address": "X001", "ok": True, "error": None}] + + def connect(self, host, port): + self.connect_calls.append((host, port)) + + def set_poll_addresses(self, addresses): + self.set_poll_calls.append(list(addresses)) + + def clear_poll_addresses(self): + self.clear_poll_calls += 1 + + def disconnect(self): + self.disconnect_calls += 1 + + def write(self, values): + self.write_calls.append(list(values)) + return list(self.write_result) + + +def _make_panel_stub(rows: list[DataViewRecord]) -> DataviewPanel: + panel = DataviewPanel.__new__(DataviewPanel) + panel.rows = rows + panel.address_normalizer = normalize_address + panel.nickname_lookup = None + panel.on_modified = None + panel.on_addresses_changed = None + panel._write_checks = [False] * len(rows) + panel._live_values = {} + panel._suppress_notifications = False + panel._is_dirty = False + panel.sheet = FakeSheet() + panel._update_status = lambda: None + return panel + + +def _make_window_stub( + panels: list[FakePanel], + *, + connected: bool, +) -> DataviewEditorWindow: + window = DataviewEditorWindow.__new__(DataviewEditorWindow) + window.notebook = FakeNotebook(panels) + window._open_panels = {panel.file_path: panel for panel in panels} + window._active_panel = panels[0] if panels else None + window._pending_closed_panel = None + window._modbus = FakeModbusService() + window._modbus_busy = False + window._modbus_write_busy = False + window._connection_state = ( + ConnectionState.CONNECTED if connected else ConnectionState.DISCONNECTED + ) + window._modbus_host_var = FakeVar("127.0.0.1") + window._modbus_port_var = FakeVar("502") + window._modbus_toggle_var = FakeVar("Connect") + window._set_modbus_error_text = lambda text="": None + window.write_checked_button = FakeWidget() + window.write_all_button = FakeWidget() + window.modbus_connect_button = FakeWidget() + window.host_entry = FakeWidget() + window.port_entry = FakeWidget() + window.connection_menu = FakeMenu() + window._iter_open_panels = lambda: list(panels) + window.after = MagicMock(side_effect=lambda _delay, callback: callback()) + window._run_background = lambda target, *args: target(*args) + return window + + +def _row(address: str, value=None) -> DataViewRecord: + row = DataViewRecord(address=address, new_value=value) + row.update_data_type() + return row + + +def 
test_panel_get_poll_addresses_canonical_dedup_valid_only(): + panel = _make_panel_stub([_row("x1"), _row("X001"), _row(""), _row("bad"), _row("ds1")]) + assert panel.get_poll_addresses() == ["X001", "DS1"] + + +def test_panel_write_row_filters_writability_checks_and_nonempty_new_value(): + panel = _make_panel_stub([_row("X001", True), _row("ds1", 12), _row("XD0", 5)]) + panel._write_checks = [True, False, True] + + assert panel.get_write_rows() == [("X001", True)] + assert panel.get_write_all_rows() == [("X001", True), ("DS1", 12)] + + +def test_panel_new_value_validation_uses_dataview_helpers(monkeypatch): + panel = _make_panel_stub([_row("DS1", 1)]) + + calls = {"validate": 0, "parse": 0} + + def _validate(row, display): + calls["validate"] += 1 + return True, "" + + def _parse(display, data_type): + calls["parse"] += 1 + return SimpleNamespace(ok=True, value=42, error="") + + monkeypatch.setattr(DataViewFile, "validate_row_display", staticmethod(_validate)) + monkeypatch.setattr(DataViewFile, "try_parse_display", staticmethod(_parse)) + monkeypatch.setattr( + DataViewFile, "value_to_display", staticmethod(lambda value, data_type: "42") + ) + + event = SimpleNamespace(row=0, column=COL_NEW_VALUE, value=" 42 ") + assert panel._validate_edit(event) == "42" + assert calls == {"validate": 1, "parse": 1} + + +def test_panel_sheet_modified_updates_new_value_from_display_helper(monkeypatch): + panel = _make_panel_stub([_row("DS1", None)]) + panel.sheet.set_cell_data(0, COL_NEW_VALUE, "7") + called = {"set": 0} + + def _setter(row, display): + called["set"] += 1 + row.new_value = int(display) + + monkeypatch.setattr(DataViewFile, "set_row_new_value_from_display", staticmethod(_setter)) + event = SimpleNamespace(cells={"table": {(0, COL_NEW_VALUE): ""}}) + + panel._on_sheet_modified(event) + + assert called["set"] == 1 + assert panel.rows[0].new_value == 7 + + +def test_toolbar_toggle_routes_to_connect_disconnect_handlers(): + window = _make_window_stub([], connected=False) + window._connect_modbus = MagicMock() + window._disconnect_modbus = MagicMock() + + window._toggle_modbus_connection() + window._connect_modbus.assert_called_once() + + window._connection_state = ConnectionState.CONNECTED + window._toggle_modbus_connection() + window._disconnect_modbus.assert_called_once() + + +def test_connection_menu_uses_same_handlers(monkeypatch): + monkeypatch.setattr("clicknick.views.dataview_editor.window.tk.Menu", FakeMenu) + monkeypatch.setattr("clicknick.views.dataview_editor.window.tk.BooleanVar", FakeVar) + + window = DataviewEditorWindow.__new__(DataviewEditorWindow) + window.config = lambda **kwargs: None + window.bind = lambda *args, **kwargs: None + window._new_dataview = MagicMock() + window._open_file = MagicMock() + window._save_current = MagicMock() + window._export = MagicMock() + window._close_current_tab = MagicMock() + window._on_close = MagicMock() + window._clear_selected_rows = MagicMock() + window._refresh_nicknames = MagicMock() + window._refresh_file_list = MagicMock() + window._toggle_nav = MagicMock() + window._toggle_modbus_toolbar = MagicMock() + window._modbus_toolbar_var = FakeVar(True) + window._connect_modbus = MagicMock() + window._disconnect_modbus = MagicMock() + + window._create_menu() + + assert window.connection_menu.commands["Connect"] == window._connect_modbus + assert window.connection_menu.commands["Disconnect"] == window._disconnect_modbus + + +def test_connect_pushes_active_tab_poll_addresses_immediately(): + panel = FakePanel("one", poll_addresses=["X001", 
"DS1"]) + window = _make_window_stub([panel], connected=False) + service = FakeModbusService() + window._modbus = service + window._ensure_modbus_service = lambda: service + + window._connect_modbus() + + assert service.connect_calls == [("127.0.0.1", 502)] + assert service.set_poll_calls == [["X001", "DS1"]] + + +def test_connect_with_no_active_tab_clears_poll_addresses(): + window = _make_window_stub([], connected=False) + service = FakeModbusService() + window._modbus = service + window._ensure_modbus_service = lambda: service + + window._connect_modbus() + + assert service.connect_calls == [("127.0.0.1", 502)] + assert service.clear_poll_calls == 1 + + +def test_tab_change_clears_hidden_live_and_replaces_poll_list(): + panel1 = FakePanel("one", poll_addresses=["X001"]) + panel2 = FakePanel("two", poll_addresses=["DS1"]) + window = _make_window_stub([panel1, panel2], connected=True) + window.notebook.set_current_index(1) + + window._on_tab_changed(None) + + assert panel1.live_cleared == 1 + assert window._modbus.set_poll_calls[-1] == ["DS1"] + + +def test_address_edits_refresh_only_for_active_tab(): + panel1 = FakePanel("one", poll_addresses=["X001"]) + panel2 = FakePanel("two", poll_addresses=["DS1"]) + window = _make_window_stub([panel1, panel2], connected=True) + + window._on_panel_addresses_changed(panel1) + assert window._modbus.set_poll_calls == [["X001"]] + + window._on_panel_addresses_changed(panel2) + assert window._modbus.set_poll_calls == [["X001"]] + + +def test_closing_last_tab_clears_poll_addresses(): + panel = FakePanel("one", poll_addresses=["X001"]) + window = _make_window_stub([panel], connected=True) + window._pending_closed_panel = panel + window.notebook._ids = [] + window.notebook._panel_map = {} + window.notebook._selected = "" + + window._on_tab_closed(None) + + assert panel.live_cleared == 1 + assert panel.destroyed is True + assert window._modbus.clear_poll_calls == 1 + + +def test_modbus_callbacks_are_marshaled_via_after(): + window = _make_window_stub([], connected=False) + + window._on_modbus_state_callback(ConnectionState.CONNECTED, None) + window._on_modbus_values_callback({"X001": True}) + + assert window.after.call_count == 2 + for call in window.after.call_args_list: + assert call.args[0] == 0 + assert callable(call.args[1]) + + +def test_write_actions_send_payload_and_clear_checks_on_success(): + panel = FakePanel( + "one", + write_rows=[("X001", True)], + write_all_rows=[("DS1", 12)], + ) + window = _make_window_stub([panel], connected=True) + + window._write_checked() + window._write_all() + + assert window._modbus.write_calls == [[("X001", True)], [("DS1", 12)]] + assert panel.write_checks_cleared == 2 + + +def test_disconnect_dispatches_work_to_background_worker(): + panel = FakePanel("one") + window = _make_window_stub([panel], connected=True) + + scheduled = {} + + def _capture(target, *args): + scheduled["target"] = target + scheduled["args"] = args + + window._run_background = _capture + + window._disconnect_modbus() + + assert window._modbus.disconnect_calls == 0 + assert window._modbus_busy is True + assert panel.live_cleared == 1 + assert callable(scheduled["target"]) + assert scheduled["args"] == () + + scheduled["target"](*scheduled["args"]) + assert window._modbus.disconnect_calls == 1 + assert window._modbus_busy is False + + +def test_write_dispatches_work_to_background_worker(): + panel = FakePanel("one", write_rows=[("X001", True)]) + window = _make_window_stub([panel], connected=True) + + scheduled = {} + + def _capture(target, 
*args): + scheduled["target"] = target + scheduled["args"] = args + + window._run_background = _capture + + window._write_checked() + + assert window._modbus.write_calls == [] + assert window._modbus_write_busy is True + assert callable(scheduled["target"]) + assert scheduled["args"] == () + + scheduled["target"](*scheduled["args"]) + assert window._modbus.write_calls == [[("X001", True)]] + assert window._modbus_write_busy is False + + +def test_disconnect_clears_poll_and_live_values(): + panel1 = FakePanel("one") + panel2 = FakePanel("two") + window = _make_window_stub([panel1, panel2], connected=True) + + window._disconnect_modbus() + + assert window._modbus.clear_poll_calls >= 1 + assert window._modbus.disconnect_calls == 1 + assert panel1.live_cleared == 1 + assert panel2.live_cleared == 1 diff --git a/tests/test_dataview_model.py b/tests/test_dataview_model.py deleted file mode 100644 index dd2ebf6..0000000 --- a/tests/test_dataview_model.py +++ /dev/null @@ -1,396 +0,0 @@ -"""Tests for Dataview model.""" - -from clicknick.models.dataview_row import ( - MAX_DATAVIEW_ROWS, - WRITABLE_SC, - WRITABLE_SD, - DataviewRow, - TypeCode, - create_empty_dataview, - display_to_storage, - get_type_code_for_address, - is_address_writable, - parse_address, - storage_to_display, -) - - -class TestParseAddress: - """Tests for parse_address function.""" - - def test_parse_x_address(self): - """Test parsing X addresses.""" - assert parse_address("X001") == ("X", "001") - assert parse_address("X1") == ("X", "1") - assert parse_address("X816") == ("X", "816") - - def test_parse_y_address(self): - """Test parsing Y addresses.""" - assert parse_address("Y001") == ("Y", "001") - assert parse_address("y001") == ("Y", "001") # Case insensitive - - def test_parse_ds_address(self): - """Test parsing DS addresses.""" - assert parse_address("DS1") == ("DS", "1") - assert parse_address("DS4500") == ("DS", "4500") - - def test_parse_dd_address(self): - """Test parsing DD addresses.""" - assert parse_address("DD1") == ("DD", "1") - assert parse_address("DD1000") == ("DD", "1000") - - def test_parse_xd_address(self): - """Test parsing XD addresses with upper byte suffix.""" - assert parse_address("XD0") == ("XD", "0") - assert parse_address("XD0u") == ("XD", "0u") - assert parse_address("XD8") == ("XD", "8") - - def test_parse_txt_address(self): - """Test parsing TXT addresses.""" - assert parse_address("TXT1") == ("TXT", "1") - assert parse_address("TXT1000") == ("TXT", "1000") - - def test_parse_ctd_address(self): - """Test parsing CTD addresses.""" - assert parse_address("CTD1") == ("CTD", "1") - assert parse_address("CTD250") == ("CTD", "250") - - def test_parse_empty(self): - """Test parsing empty address.""" - assert parse_address("") is None - assert parse_address(" ") is None - - def test_parse_invalid(self): - """Test parsing invalid addresses.""" - assert parse_address("123") is None # No prefix - assert parse_address("ABC") is None # No number - assert parse_address("X") is None # No number - - -class TestGetTypeCodeForAddress: - """Tests for get_type_code_for_address function.""" - - def test_bit_addresses(self): - """Test BIT type addresses.""" - assert get_type_code_for_address("X001") == TypeCode.BIT - assert get_type_code_for_address("Y001") == TypeCode.BIT - assert get_type_code_for_address("C1") == TypeCode.BIT - assert get_type_code_for_address("T1") == TypeCode.BIT - assert get_type_code_for_address("CT1") == TypeCode.BIT - assert get_type_code_for_address("SC1") == TypeCode.BIT - - def 
test_int_addresses(self): - """Test INT type addresses.""" - assert get_type_code_for_address("DS1") == TypeCode.INT - assert get_type_code_for_address("TD1") == TypeCode.INT - assert get_type_code_for_address("SD1") == TypeCode.INT - - def test_int2_addresses(self): - """Test INT2 type addresses.""" - assert get_type_code_for_address("DD1") == TypeCode.INT2 - assert get_type_code_for_address("CTD1") == TypeCode.INT2 - - def test_hex_addresses(self): - """Test HEX type addresses.""" - assert get_type_code_for_address("DH1") == TypeCode.HEX - assert get_type_code_for_address("XD0") == TypeCode.HEX - assert get_type_code_for_address("YD0") == TypeCode.HEX - - def test_float_addresses(self): - """Test FLOAT type addresses.""" - assert get_type_code_for_address("DF1") == TypeCode.FLOAT - - def test_txt_addresses(self): - """Test TXT type addresses.""" - assert get_type_code_for_address("TXT1") == TypeCode.TXT - - def test_invalid_address(self): - """Test invalid address returns None.""" - assert get_type_code_for_address("INVALID") is None - assert get_type_code_for_address("") is None - - -class TestIsAddressWritable: - """Tests for is_address_writable function.""" - - def test_regular_addresses_writable(self): - """Test that regular addresses are writable.""" - assert is_address_writable("X001") is True - assert is_address_writable("Y001") is True - assert is_address_writable("C1") is True - assert is_address_writable("DS1") is True - assert is_address_writable("DD1") is True - assert is_address_writable("DF1") is True - - def test_xd_yd_readonly(self): - """Test that XD and YD are read-only.""" - assert is_address_writable("XD0") is False - assert is_address_writable("XD0u") is False - assert is_address_writable("YD0") is False - assert is_address_writable("YD8") is False - - def test_sc_writable_addresses(self): - """Test specific SC addresses are writable.""" - for addr in WRITABLE_SC: - assert is_address_writable(f"SC{addr}") is True - - def test_sc_readonly_addresses(self): - """Test non-writable SC addresses are read-only.""" - # SC1 is not in WRITABLE_SC - assert is_address_writable("SC1") is False - assert is_address_writable("SC100") is False - - def test_sd_writable_addresses(self): - """Test specific SD addresses are writable.""" - for addr in WRITABLE_SD: - assert is_address_writable(f"SD{addr}") is True - - def test_sd_readonly_addresses(self): - """Test non-writable SD addresses are read-only.""" - # SD1 is not in WRITABLE_SD - assert is_address_writable("SD1") is False - assert is_address_writable("SD100") is False - - def test_invalid_address(self): - """Test invalid address returns False.""" - assert is_address_writable("INVALID") is False - assert is_address_writable("") is False - - -class TestDataviewRow: - """Tests for DataviewRow dataclass.""" - - def test_default_values(self): - """Test default values.""" - row = DataviewRow() - assert row.address == "" - assert row.type_code == 0 - assert row.new_value == "" - assert row.nickname == "" - assert row.comment == "" - - def test_is_empty(self): - """Test is_empty property.""" - row = DataviewRow() - assert row.is_empty is True - - row.address = "X001" - assert row.is_empty is False - - row.address = " " - assert row.is_empty is True - - def test_is_writable(self): - """Test is_writable property.""" - row = DataviewRow(address="X001") - assert row.is_writable is True - - row.address = "XD0" - assert row.is_writable is False - - def test_memory_type(self): - """Test memory_type property.""" - row = 
DataviewRow(address="DS100") - assert row.memory_type == "DS" - - row.address = "" - assert row.memory_type is None - - def test_address_number(self): - """Test address_number property.""" - row = DataviewRow(address="DS100") - assert row.address_number == "100" - - row.address = "XD0u" - assert row.address_number == "0u" - - def test_update_type_code(self): - """Test update_type_code method.""" - row = DataviewRow(address="DS100") - assert row.update_type_code() is True - assert row.type_code == TypeCode.INT - - row.address = "INVALID" - assert row.update_type_code() is False - - def test_clear(self): - """Test clear method.""" - row = DataviewRow( - address="X001", - type_code=TypeCode.BIT, - new_value="1", - nickname="Test", - comment="Comment", - ) - row.clear() - assert row.address == "" - assert row.type_code == 0 - assert row.new_value == "" - assert row.nickname == "" - assert row.comment == "" - - -class TestCreateEmptyDataview: - """Tests for create_empty_dataview function.""" - - def test_creates_correct_count(self): - """Test that correct number of rows is created.""" - rows = create_empty_dataview() - assert len(rows) == MAX_DATAVIEW_ROWS - - def test_all_rows_empty(self): - """Test that all rows are empty.""" - rows = create_empty_dataview() - assert all(row.is_empty for row in rows) - - def test_rows_are_independent(self): - """Test that rows are independent objects.""" - rows = create_empty_dataview() - rows[0].address = "X001" - assert rows[1].address == "" - - -class TestStorageToDisplay: - """Tests for storage_to_display conversion.""" - - def test_bit_values(self): - """Test BIT type conversion.""" - assert storage_to_display("1", TypeCode.BIT) == "1" - assert storage_to_display("0", TypeCode.BIT) == "0" - - def test_int_positive(self): - """Test INT (16-bit signed) positive values.""" - assert storage_to_display("0", TypeCode.INT) == "0" - assert storage_to_display("100", TypeCode.INT) == "100" - assert storage_to_display("32767", TypeCode.INT) == "32767" - - def test_int_negative(self): - """Test INT (16-bit signed) negative values stored as unsigned 32-bit.""" - # -32768 stored as 4294934528 (0xFFFF8000) - assert storage_to_display("4294934528", TypeCode.INT) == "-32768" - # -1 stored as 4294967295 (0xFFFFFFFF), but masked to 16-bit = -1 - assert storage_to_display("4294967295", TypeCode.INT) == "-1" - - def test_int2_positive(self): - """Test INT2 (32-bit signed) positive values.""" - assert storage_to_display("0", TypeCode.INT2) == "0" - assert storage_to_display("100", TypeCode.INT2) == "100" - assert storage_to_display("2147483647", TypeCode.INT2) == "2147483647" - - def test_int2_negative(self): - """Test INT2 (32-bit signed) negative values stored as unsigned 32-bit.""" - # -2147483648 stored as 2147483648 - assert storage_to_display("2147483648", TypeCode.INT2) == "-2147483648" - # -2 stored as 4294967294 - assert storage_to_display("4294967294", TypeCode.INT2) == "-2" - # -1 stored as 4294967295 - assert storage_to_display("4294967295", TypeCode.INT2) == "-1" - - def test_hex_values(self): - """Test HEX type (decimal to 4-digit hex string with leading zeros).""" - assert storage_to_display("65535", TypeCode.HEX) == "FFFF" - assert storage_to_display("255", TypeCode.HEX) == "00FF" - assert storage_to_display("0", TypeCode.HEX) == "0000" - - def test_txt_values(self): - """Test TXT type (ASCII code to character).""" - assert storage_to_display("48", TypeCode.TXT) == "0" - assert storage_to_display("65", TypeCode.TXT) == "A" - assert storage_to_display("90", 
TypeCode.TXT) == "Z" - assert storage_to_display("49", TypeCode.TXT) == "1" - - def test_empty_value(self): - """Test empty values return empty string.""" - assert storage_to_display("", TypeCode.INT) == "" - assert storage_to_display("", TypeCode.HEX) == "" - - -class TestDisplayToStorage: - """Tests for display_to_storage conversion.""" - - def test_bit_values(self): - """Test BIT type conversion.""" - assert display_to_storage("1", TypeCode.BIT) == "1" - assert display_to_storage("0", TypeCode.BIT) == "0" - - def test_int_positive(self): - """Test INT (16-bit signed) positive values.""" - assert display_to_storage("0", TypeCode.INT) == "0" - assert display_to_storage("100", TypeCode.INT) == "100" - assert display_to_storage("32767", TypeCode.INT) == "32767" - - def test_int_negative(self): - """Test INT (16-bit signed) negative values to unsigned 32-bit.""" - # -32768 should become 4294934528 - assert display_to_storage("-32768", TypeCode.INT) == "4294934528" - # -1 should become 4294967295 - assert display_to_storage("-1", TypeCode.INT) == "4294967295" - - def test_int2_positive(self): - """Test INT2 (32-bit signed) positive values.""" - assert display_to_storage("0", TypeCode.INT2) == "0" - assert display_to_storage("100", TypeCode.INT2) == "100" - - def test_int2_negative(self): - """Test INT2 (32-bit signed) negative values to unsigned 32-bit.""" - # -2147483648 should become 2147483648 - assert display_to_storage("-2147483648", TypeCode.INT2) == "2147483648" - # -2 should become 4294967294 - assert display_to_storage("-2", TypeCode.INT2) == "4294967294" - - def test_hex_values(self): - """Test HEX type (hex string to decimal).""" - assert display_to_storage("FFFF", TypeCode.HEX) == "65535" - assert display_to_storage("FF", TypeCode.HEX) == "255" - assert display_to_storage("0xFF", TypeCode.HEX) == "255" - assert display_to_storage("0", TypeCode.HEX) == "0" - - def test_txt_values(self): - """Test TXT type (character to ASCII code).""" - assert display_to_storage("0", TypeCode.TXT) == "48" - assert display_to_storage("A", TypeCode.TXT) == "65" - assert display_to_storage("Z", TypeCode.TXT) == "90" - assert display_to_storage("1", TypeCode.TXT) == "49" - - def test_empty_value(self): - """Test empty values return empty string.""" - assert display_to_storage("", TypeCode.INT) == "" - assert display_to_storage("", TypeCode.HEX) == "" - - -class TestRoundTripConversion: - """Tests for round-trip storage <-> display conversion.""" - - def test_int_roundtrip(self): - """Test INT values round-trip correctly.""" - for val in ["-32768", "-1", "0", "100", "32767"]: - storage = display_to_storage(val, TypeCode.INT) - display = storage_to_display(storage, TypeCode.INT) - assert display == val, f"Round-trip failed for {val}" - - def test_int2_roundtrip(self): - """Test INT2 values round-trip correctly.""" - for val in ["-2147483648", "-2", "-1", "0", "100", "2147483647"]: - storage = display_to_storage(val, TypeCode.INT2) - display = storage_to_display(storage, TypeCode.INT2) - assert display == val, f"Round-trip failed for {val}" - - def test_hex_roundtrip(self): - """Test HEX values round-trip correctly (display has 4-digit format).""" - # Note: display format is always 4 digits with leading zeros - test_cases = [ - ("0", "0000"), - ("FF", "00FF"), - ("FFFF", "FFFF"), - ] - for input_val, expected_display in test_cases: - storage = display_to_storage(input_val, TypeCode.HEX) - display = storage_to_display(storage, TypeCode.HEX) - assert display == expected_display, f"Round-trip failed for 
{input_val}" - - def test_txt_roundtrip(self): - """Test TXT values round-trip correctly.""" - for val in ["0", "A", "Z", "1"]: - storage = display_to_storage(val, TypeCode.TXT) - display = storage_to_display(storage, TypeCode.TXT) - assert display == val, f"Round-trip failed for {val}" diff --git a/tests/test_row_service.py b/tests/test_row_service.py index 8c1ea55..2e67f48 100644 --- a/tests/test_row_service.py +++ b/tests/test_row_service.py @@ -1,9 +1,9 @@ """Tests for RowService.""" import pytest +from pyclickplc.addresses import get_addr_key from clicknick.data.address_store import AddressStore -from clicknick.models.address_row import get_addr_key from clicknick.services.row_service import RowService diff --git a/tests/test_validation.py b/tests/test_validation.py index 09d77df..6c8e248 100644 --- a/tests/test_validation.py +++ b/tests/test_validation.py @@ -1,8 +1,8 @@ """Tests for validation.py - nickname and initial value validation.""" import pytest +from pyclickplc.banks import DataType -from clicknick.models.constants import DataType from clicknick.models.validation import ( validate_initial_value, validate_nickname, diff --git a/uv.lock b/uv.lock index 24a7b7f..e2ebc43 100644 --- a/uv.lock +++ b/uv.lock @@ -1,11 +1,12 @@ version = 1 -revision = 2 +revision = 3 requires-python = ">=3.11, <4.0" [[package]] name = "clicknick" source = { editable = "." } dependencies = [ + { name = "pyclickplc" }, { name = "pyodbc" }, { name = "pywin32" }, { name = "tksheet" }, @@ -23,6 +24,7 @@ dev = [ [package.metadata] requires-dist = [ + { name = "pyclickplc" }, { name = "pyodbc", specifier = ">=5.3.0" }, { name = "pywin32", specifier = ">=311" }, { name = "tksheet", specifier = "==7.5.19" }, @@ -122,6 +124,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, ] +[[package]] +name = "pyclickplc" +version = "0.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pymodbus" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/ed/003596602158fbc859a03d7b710c9f9be3d35e3a5cadfd0a9906f08c1285/pyclickplc-0.1.0.tar.gz", hash = "sha256:e8832a81b92c829e5d0691c9516720af72cef7d8bdd794f785b16a034e4c118d", size = 122353, upload-time = "2026-02-27T14:23:49.066Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/06/34/92814d0c31dcbfa9e3861625cfd0b248249f5ac6d47c2030f09eeea226e8/pyclickplc-0.1.0-py3-none-any.whl", hash = "sha256:148d20d14b0a512cf610440b21ff49215c2d70471ddcf660b93e5d9407d0d6da", size = 49203, upload-time = "2026-02-27T14:23:47.605Z" }, +] + [[package]] name = "pygments" version = "2.19.2" @@ -131,6 +145,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, ] +[[package]] +name = "pymodbus" +version = "3.11.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/af/bbb716301ab9c60f0702c3cdf72cc0e373286a5648fe16bc4431400489dc/pymodbus-3.11.4.tar.gz", hash = "sha256:6910e385cb6b2f983cd457e9ecee2ff580dbb23cf3d84aefec0845e71edd606a", size = 163422, upload-time = 
"2025-11-30T10:36:33.717Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/80/84c32440949c77f2c201c38e5fa56fd8f19da31bf24df55bdbdaba67767c/pymodbus-3.11.4-py3-none-any.whl", hash = "sha256:89865929f53bd5e32b4076dde00ee86d9b8afb1686832ed74e69d55df22729c3", size = 166002, upload-time = "2025-11-30T10:36:31.992Z" }, +] + [[package]] name = "pyodbc" version = "5.3.0"