Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ const ToolCallRenderer: FC<ToolCallRendererProps> = ({ content }) => {
key={tool_result.content}
/>
) : (
<p key={tool_result}>${String(tool_result)}</p>
<p key={String(tool_result)}>{String(tool_result)}</p>
);
})}
</div>
Expand Down
14 changes: 14 additions & 0 deletions python/valuecell/agents/research_agent/schemas.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,19 @@
from dataclasses import dataclass
from pathlib import Path
from typing import TypedDict


class SECFilingMetadataDict(TypedDict):
    """JSON-serializable metadata for a single SEC filing.

    Plain-dict mirror of ``SECFilingMetadata`` so tool results can be
    returned as JSON-friendly values.
    """

    # SEC form type, e.g. "10-Q" or "10-K".
    doc_type: str
    # Company display name, e.g. "Apple Inc.".
    company: str
    # Period-of-report date string, e.g. "2025-06-28".
    period_of_report: str
    # Date the filing was submitted, e.g. "2025-08-01".
    filing_date: str


class SECFilingResultDict(TypedDict):
    """JSON-serializable summary of one fetched SEC filing.

    Plain-dict mirror of ``SECFilingResult`` used as the tool-facing
    return shape (see ``_serialize_sec_filing_results``).
    """

    # Logical document name, e.g. "10-Q_aapl-20250628.md".
    name: str
    # Local filesystem path the markdown was written to (stringified).
    path: str
    # Flattened filing metadata.
    metadata: SECFilingMetadataDict


@dataclass
Expand Down
38 changes: 32 additions & 6 deletions python/valuecell/agents/research_agent/sources.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
AShareFilingResult,
SECFilingMetadata,
SECFilingResult,
SECFilingResultDict,
)


Expand Down Expand Up @@ -126,6 +127,27 @@ async def _write_and_ingest(
return results


def _serialize_sec_filing_results(
    results: List[SECFilingResult],
) -> List[SECFilingResultDict]:
    """Convert SEC filing result objects into JSON-serializable dicts.

    Args:
        results: Filing result objects produced by ``_write_and_ingest``.

    Returns:
        One plain dict per filing; ``path`` is stringified so the whole
        structure is JSON-safe.
    """
    return [
        {
            "name": filing.name,
            "path": str(filing.path),
            "metadata": {
                "doc_type": filing.metadata.doc_type,
                "company": filing.metadata.company,
                "period_of_report": filing.metadata.period_of_report,
                "filing_date": filing.metadata.filing_date,
            },
        }
        for filing in results
    ]


async def fetch_periodic_sec_filings(
cik_or_ticker: str,
forms: List[str] | str = "10-Q",
Expand All @@ -151,7 +173,7 @@ async def fetch_periodic_sec_filings(
limit: When `year` is omitted, number of latest filings to return (by filing_date). Defaults to 10.

Returns:
List[SECFilingResult]
List[SECFilingResultDict] summarizing the fetched filings.
"""
req_forms = set(_ensure_list(forms)) or {"10-Q"}
company = await asyncio.to_thread(lambda: Company(cik_or_ticker))
Expand All @@ -170,13 +192,15 @@ async def fetch_periodic_sec_filings(
items = list(filings)
else:
items = [filings]
return await _write_and_ingest(items, Path(get_knowledge_path()))
result = await _write_and_ingest(items, Path(get_knowledge_path()))
return _serialize_sec_filing_results(result)

filings = await asyncio.to_thread(
lambda: company.get_filings(form=list(req_forms), year=year, quarter=quarter)
)

return await _write_and_ingest(filings, Path(get_knowledge_path()))
result = await _write_and_ingest(filings, Path(get_knowledge_path()))
return _serialize_sec_filing_results(result)


async def fetch_event_sec_filings(
Expand All @@ -197,7 +221,7 @@ async def fetch_event_sec_filings(
(Note: The tool will always ingest written markdown into the knowledge base.)

Returns:
List[SECFilingResult]
List[SECFilingResultDict] summarizing the fetched filings.
"""
sd = _parse_date(start_date)
ed = _parse_date(end_date)
Expand All @@ -216,7 +240,8 @@ async def fetch_event_sec_filings(
items = list(filings)
else:
items = [filings]
return await _write_and_ingest(items, Path(get_knowledge_path()))
result = await _write_and_ingest(items, Path(get_knowledge_path()))
return _serialize_sec_filing_results(result)

# Otherwise, fetch and filter by filing_date range
filings = await asyncio.to_thread(lambda: company.get_filings(form=list(req_forms)))
Expand All @@ -239,7 +264,8 @@ async def fetch_event_sec_filings(
if limit is not None and limit > 0:
filtered = filtered[:limit]

return await _write_and_ingest(filtered, Path(get_knowledge_path()))
result = await _write_and_ingest(filtered, Path(get_knowledge_path()))
return _serialize_sec_filing_results(result)


async def web_search(query: str) -> str:
Expand Down
4 changes: 2 additions & 2 deletions python/valuecell/core/event/buffer.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def __init__(
agent_name: Optional[str] = None,
):
self.parts: List[str] = []
self.last_updated: float = time.monotonic()
self.last_updated: float = time.perf_counter()
# Stable paragraph id for this buffer entry. Reused across streamed chunks
# until this entry is flushed (debounce/boundary). On size-based flush,
# we rotate to a new paragraph id for subsequent chunks.
Expand All @@ -63,7 +63,7 @@ def append(self, text: str):
"""Append a chunk of text to this buffer and update the timestamp."""
if text:
self.parts.append(text)
self.last_updated = time.monotonic()
self.last_updated = time.perf_counter()

def snapshot_payload(self) -> Optional[BaseResponseDataPayload]:
"""Return the current aggregate content as a payload without clearing.
Expand Down
101 changes: 88 additions & 13 deletions python/valuecell/core/event/factory.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Optional
from typing import Any, Optional

from typing_extensions import Literal

Expand Down Expand Up @@ -34,6 +34,80 @@
from valuecell.utils.uuid import generate_item_id, generate_uuid


def _parse_tool_result_payload(result: str) -> Any | None:
"""Best-effort parse of tool result strings into Python values."""
import ast
import json

try:
return json.loads(result)
except (json.JSONDecodeError, TypeError):
pass

try:
return ast.literal_eval(result)
except (SyntaxError, ValueError):
return None


def _extract_sec_filing_item(item: Any) -> dict[str, str] | None:
"""Normalize SEC filing tool payload items into a flat mapping."""
if not isinstance(item, dict):
return None

metadata = item.get("metadata")
if not isinstance(metadata, dict):
return None

normalized = {
"name": str(item.get("name") or ""),
"path": str(item.get("path") or ""),
"doc_type": str(metadata.get("doc_type") or ""),
"company": str(metadata.get("company") or ""),
"period_of_report": str(metadata.get("period_of_report") or ""),
"filing_date": str(metadata.get("filing_date") or ""),
}

required_keys = ("doc_type", "company", "filing_date")
if not all(normalized[key] for key in required_keys):
return None
return normalized


def _format_sec_filing_result_for_frontend(result: list[Any]) -> str | None:
    """Render SEC filing tool output into a compact Markdown summary.

    Args:
        result: Parsed tool payload; expected to be a list of dicts shaped
            like ``SECFilingResultDict`` (``name``/``path`` plus ``metadata``).

    Returns:
        A Markdown string summarizing every filing, or ``None`` when the
        payload is empty or any item does not look like a SEC filing entry
        (the caller then falls back to generic formatting).
    """
    from pathlib import PurePath

    filings: list[dict[str, str]] = []
    for item in result:
        filing = _extract_sec_filing_item(item)
        if filing is None:
            # One malformed item disqualifies the whole payload: treat it as
            # not-a-SEC-filing list rather than rendering a partial summary.
            return None
        filings.append(filing)

    if not filings:
        return None

    lines = [f"**Fetched {len(filings)} SEC filing(s)**", ""]
    for index, filing in enumerate(filings, start=1):
        path_value = filing["path"]
        # Inline only the file name; the full path goes in a collapsible
        # <details> block below. Fall back to the logical name if no path.
        path_name = PurePath(path_value).name if path_value else filing["name"]
        lines.append(f"{index}. **{filing['company']}** `{filing['doc_type']}`")
        lines.append(f"   - **Filed:** {filing['filing_date']}")
        if filing["period_of_report"]:
            lines.append(f"   - **Period end:** {filing['period_of_report']}")
        if path_name:
            lines.append(f"   - **Saved as:** `{path_name}`")
        if path_value:
            lines.append(
                "   - <details><summary>Local path</summary>"
                f"<code>{path_value}</code></details>"
            )
        lines.append("")

    return "\n".join(lines).strip()


def _format_tool_result_for_frontend(result: str | None) -> str | None:
"""Format tool result as JSON array for frontend rendering.

Expand All @@ -53,18 +127,19 @@ def _format_tool_result_for_frontend(result: str | None) -> str | None:
if not result:
return result

# Check if already in expected format: [{"content": ...}]
try:
parsed = json.loads(result)
if (
isinstance(parsed, list)
and len(parsed) > 0
and isinstance(parsed[0], dict)
and "content" in parsed[0]
):
return result
except (json.JSONDecodeError, TypeError):
pass
parsed = _parse_tool_result_payload(result)
if (
isinstance(parsed, list)
and len(parsed) > 0
and isinstance(parsed[0], dict)
and "content" in parsed[0]
):
return result

if isinstance(parsed, list):
sec_markdown = _format_sec_filing_result_for_frontend(parsed)
if sec_markdown is not None:
return json.dumps([{"content": sec_markdown}])

return json.dumps([{"content": result}])

Expand Down
48 changes: 48 additions & 0 deletions python/valuecell/core/event/tests/test_response_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -215,6 +215,54 @@ def test_error_message_wrapped(self):
parsed = json.loads(result)
assert parsed == [{"content": "ERROR: connection failed"}]

def test_sec_filing_payload_rendered_as_markdown_summary(self):
    """A JSON SEC-filing payload is rewritten into a Markdown summary."""
    filing = {
        "name": "10-Q_aapl-20250628.md",
        "path": "/tmp/10-Q_aapl-20250628.md",
        "metadata": {
            "doc_type": "10-Q",
            "company": "Apple Inc.",
            "period_of_report": "2025-06-28",
            "filing_date": "2025-08-01",
        },
    }

    result = _format_tool_result_for_frontend(json.dumps([filing]))
    parsed = json.loads(result)

    assert len(parsed) == 1
    content = parsed[0]["content"]
    assert "Fetched 1 SEC filing(s)" in content
    assert "**Apple Inc.**" in content
    assert "`10-Q`" in content
    assert "Period end:** 2025-06-28" in content
    assert "Local path" in content

def test_python_literal_sec_filing_payload_rendered_as_markdown_summary(self):
    """A repr()-style (non-JSON) payload is parsed and summarized too."""
    filing = {
        "name": "10-K_msft-20241231.md",
        "path": "/tmp/10-K_msft-20241231.md",
        "metadata": {
            "doc_type": "10-K",
            "company": "Microsoft Corporation",
            "period_of_report": "2024-12-31",
            "filing_date": "2025-02-01",
        },
    }

    result = _format_tool_result_for_frontend(str([filing]))
    content = json.loads(result)[0]["content"]

    assert "Microsoft Corporation" in content
    assert "`10-K`" in content


# ============================================================
# Tests for tool_call method with formatting
Expand Down