From b7fae82768e956c2cf1191862605723b31a58815 Mon Sep 17 00:00:00 2001
From: Armon Kolaei
Date: Thu, 26 Mar 2026 22:42:54 -0400
Subject: [PATCH] feat(frontend): add Simple/Advanced toggle with AI-assisted
 seed + prompt generation

Introduced a UI toggle between Simple and Advanced modes for simulation
setup. Simple mode allows users to enter a single high-level prompt, which
is then used to automatically generate both the Reality Seed and Simulation
Prompt via the existing API. Advanced mode preserves the original manual
workflow with separate inputs for Reality Seed and Prompt. Improves
usability for non-technical users and streamlines simulation creation.
---
 backend/app/api/graph.py    |  75 ++++++++++++
 frontend/src/api/graph.js   |  13 ++
 frontend/src/views/Home.vue | 228 +++++++++++++++++++++++++++++-------
 3 files changed, 276 insertions(+), 40 deletions(-)

diff --git a/backend/app/api/graph.py b/backend/app/api/graph.py
index 59f915e..edb2e33 100644
--- a/backend/app/api/graph.py
+++ b/backend/app/api/graph.py
@@ -15,6 +15,7 @@
 from ..services.text_processor import TextProcessor
 from ..utils.file_parser import FileParser
 from ..utils.logger import get_logger
+from ..utils.llm_client import LLMClient
 
 from ..models.task import TaskManager, TaskStatus
 from ..models.project import ProjectManager, ProjectStatus
@@ -595,3 +596,77 @@ def delete_graph(graph_id: str):
             "error": str(e),
             "traceback": traceback.format_exc()
         }), 500
+
+
+# ============== Simple Mode: Auto-generate Seed + Prompt ==============
+
+@graph_bp.route('/simple-generate', methods=['POST'])
+def simple_generate():
+    """
+    Given a plain-language description, use the LLM to produce:
+      - seed_content: a structured context document (the "reality seed" .txt)
+      - prompt: a structured simulation prompt ready for MiroFish
+    Body JSON: { "description": "...user's plain-language goal..." }
+    """
+    try:
+        import datetime
+        body = request.get_json(force=True, silent=True) or {}
+        description = (body.get('description') or '').strip()
+        if not description:
+            return jsonify({"success": False, "error": "Missing 'description' in request body"}), 400
+
+        llm = LLMClient()
+
+        system_prompt = """You are an expert simulation designer for MiroFish, an offline multi-agent prediction engine.
+Given a plain-language goal from a user, produce TWO outputs as a JSON object with exactly two keys:
+
+1. "seed_content": A structured context document (plain text) that serves as the "reality seed" for the simulation.
+   Include:
+   - Date header
+   - Relevant factual context organized under clear headings
+   - Key factors (market, geopolitical, social, etc.)
+   - A concise "Goal:" section at the end
+
+2. "prompt": A concise, structured simulation prompt for an AI agent.
+   Include:
+   - Role assignment ("Act as ...")
+   - The date
+   - Reference to the provided context
+   - "Consider:" bullet points
+   - "Output:" numbered deliverables
+   - Instruction to be decisive and specific
+
+Return ONLY valid JSON with keys "seed_content" and "prompt".
+No markdown, no extra explanation."""
+
+        today = datetime.date.today().strftime('%B %d, %Y')
+        user_message = f"Today's date: {today}\n\nUser goal: {description}"
+
+        result = llm.chat_json(
+            messages=[
+                {"role": "system", "content": system_prompt},
+                {"role": "user", "content": user_message}
+            ],
+            temperature=0.5,
+            max_tokens=4096
+        )
+
+        seed_content = result.get('seed_content', '')
+        prompt_text = result.get('prompt', '')
+
+        if not seed_content or not prompt_text:
+            return jsonify({"success": False, "error": "LLM returned incomplete output", "raw": result}), 500
+
+        return jsonify({
+            "success": True,
+            "seed_content": seed_content,
+            "prompt": prompt_text
+        })
+
+    except Exception as e:
+        # logger.exception records the full stack trace; lazy %-args avoid
+        # eager f-string formatting (fix: was logger.error(f"...{e}"))
+        logger.exception("simple_generate error: %s", e)
+        return jsonify({
+            "success": False,
+            "error": str(e),
+            "traceback": traceback.format_exc()
+        }), 500
diff --git a/frontend/src/api/graph.js b/frontend/src/api/graph.js
index 4047f30..f0dc4b6 100644
--- a/frontend/src/api/graph.js
+++ b/frontend/src/api/graph.js
@@ -68,3 +68,16 @@ export function getProject(projectId) {
     method: 'get'
   })
 }
+
+/**
+ * Simple mode: generate seed document + simulation prompt from a plain-language description
+ * @param {String} description - User's plain-language prediction goal
+ * @returns {Promise<{ seed_content: string, prompt: string }>}
+ */
+export function generateSeedAndPrompt(description) {
+  return service({
+    url: '/api/graph/simple-generate',
+    method: 'post',
+    data: { description }
+  })
+}
diff --git a/frontend/src/views/Home.vue b/frontend/src/views/Home.vue
index 36bb714..e08f403 100644
--- a/frontend/src/views/Home.vue
+++ b/frontend/src/views/Home.vue
@@ -86,54 +86,131 @@
+ + +
+ + + {{ simpleMode ? 'Describe your goal — we generate the seed & prompt for you' : 'Upload your own seed document and write a custom prompt' }} +
+
-
-
- 01 / Reality Seeds - Supported: PDF, MD, TXT + + + + + + -
- -
@@ -147,6 +224,7 @@
 import { ref, computed, reactive } from 'vue'
 import { useRouter } from 'vue-router'
 import HistoryDatabase from '../components/HistoryDatabase.vue'
+import { generateSeedAndPrompt } from '../api/graph'
 
 const mono = 'JetBrains Mono, monospace'
 const sans = 'Space Grotesk, Noto Sans SC, system-ui, sans-serif'
@@ -196,6 +274,13 @@
   stepTitle: { fontWeight: '520', fontSize: '1rem', marginBottom: '4px' },
   stepDesc: { fontSize: '0.85rem', color: '#666' },
   rightPanel: { flex: '1.2', display: 'flex', flexDirection: 'column' },
+  // Mode toggle
+  modeToggleRow: { display: 'flex', alignItems: 'center', gap: '0', marginBottom: '12px', flexWrap: 'wrap', rowGap: '8px' },
+  modeTab: { fontFamily: mono, fontSize: '0.8rem', fontWeight: '700', letterSpacing: '1px', padding: '8px 22px', border: '1px solid #CCC', background: '#fff', color: '#888', cursor: 'pointer' },
+  modeTabActive: { fontFamily: mono, fontSize: '0.8rem', fontWeight: '700', letterSpacing: '1px', padding: '8px 22px', border: '1px solid #000', background: '#000', color: '#fff', cursor: 'pointer' },
+  modeHint: { fontFamily: mono, fontSize: '0.72rem', color: '#AAA', marginLeft: '16px' },
+
+  // Console
   consoleBox: { border: '1px solid #CCC', padding: '8px' },
   consoleSection: { padding: '20px' },
   consoleHeader: { display: 'flex', justifyContent: 'space-between', marginBottom: '15px', fontFamily: mono, fontSize: '0.75rem', color: '#666' },
@@ -211,10 +296,12 @@
   consoleDivider: { display: 'flex', alignItems: 'center', margin: '10px 0', borderTop: '1px solid #EEE' },
   consoleDividerText: { padding: '0 15px', fontFamily: mono, fontSize: '0.7rem', color: '#BBB', letterSpacing: '1px' },
   inputWrapper: { position: 'relative', border: '1px solid #DDD', background: '#FAFAFA' },
-  codeInput: { width: '100%', border: 'none', background: 'transparent', padding: '20px', fontFamily: mono, fontSize: '0.9rem', lineHeight: '1.6', resize: 'vertical', outline: 'none', minHeight: '150px' },
+  codeInput: { width: '100%', border: 'none', background: 'transparent', padding: '20px', fontFamily: mono, fontSize: '0.9rem', lineHeight: '1.6', resize: 'vertical', outline: 'none', minHeight: '150px', boxSizing: 'border-box' },
   modelBadge: { position: 'absolute', bottom: '10px', right: '15px', fontFamily: mono, fontSize: '0.7rem', color: '#AAA' },
   btnSection: { padding: '0 20px 20px' },
   startEngineBtn: { width: '100%', background: '#000', color: '#fff', border: 'none', padding: '20px', fontFamily: mono, fontWeight: '700', fontSize: '1.1rem', display: 'flex', justifyContent: 'space-between', alignItems: 'center', cursor: 'pointer', letterSpacing: '1px' },
+  generateBtn: { width: '100%', background: '#FF4500', color: '#fff', border: 'none', padding: '18px 20px', fontFamily: mono, fontWeight: '700', fontSize: '1rem', display: 'flex', justifyContent: 'space-between', alignItems: 'center', cursor: 'pointer', letterSpacing: '1px' },
+  errorBox: { margin: '0 20px 12px', padding: '12px 16px', border: '1px solid #FF4500', background: '#FFF5F2', color: '#FF4500', fontFamily: mono, fontSize: '0.8rem' },
 })
 
 const steps = [
@@ -227,6 +314,8 @@
 ]
 
 const router = useRouter()
+
+// ── Advanced mode state ──
 const formData = ref({ simulationRequirement: '' })
 const files = ref([])
 const loading = ref(false)
@@ -234,10 +323,31 @@
 const error = ref('')
 const isDragOver = ref(false)
 const fileInput = ref(null)
 
+// ── Simple mode state ──
+const simpleMode = ref(true)
+const simpleDescription = ref('')
+const isGenerating = ref(false)
+const generatedSeed = ref('')
+const generatedPrompt = ref('')
+const generateError = ref('')
+
+// ── Mode switching ──
+const switchMode = (toSimple) => {
+  simpleMode.value = toSimple
+  generateError.value = ''
+}
+
+// ── Computed ──
 const canSubmit = computed(() => {
   return formData.value.simulationRequirement.trim() !== '' && files.value.length > 0
 })
+
+const canSubmitSimple = computed(() => {
+  return generatedSeed.value.trim() !== '' && generatedPrompt.value.trim() !== ''
+})
+// ── Advanced mode handlers ──
 const triggerFileInput = () => { if (!loading.value) fileInput.value?.click() }
 const handleFileSelect = (event) => { addFiles(Array.from(event.target.files)) }
 const handleDragOver = (e) => { isDragOver.value = true }
@@ -254,10 +364,53 @@
 const removeFile = (index) => { files.value.splice(index, 1) }
 
 const scrollToBottom = () => { window.scrollTo({ top: document.body.scrollHeight, behavior: 'smooth' }) }
+
+// ── Simple mode: generate seed + prompt via LLM ──
+const generateSimple = async () => {
+  if (!simpleDescription.value.trim() || isGenerating.value) return
+  isGenerating.value = true
+  generateError.value = ''
+  generatedSeed.value = ''
+  generatedPrompt.value = ''
+  try {
+    const res = await generateSeedAndPrompt(simpleDescription.value.trim())
+    // axios interceptor already unwraps response.data, so res IS the payload
+    if (res && res.seed_content && res.prompt) {
+      generatedSeed.value = res.seed_content
+      generatedPrompt.value = res.prompt
+    } else {
+      generateError.value = res?.error || 'Generation failed — please try again.'
+    }
+  } catch (e) {
+    generateError.value = e?.response?.data?.error || e.message || 'Network error during generation.'
+  } finally {
+    isGenerating.value = false
+  }
+}
+
+const resetSimple = () => {
+  generatedSeed.value = ''
+  generatedPrompt.value = ''
+  generateError.value = ''
+}
+
+// ── Start simulation ──
 const startSimulation = () => {
-  if (!canSubmit.value || loading.value) return
+  if (loading.value) return
   import('../store/pendingUpload.js').then(({ setPendingUpload }) => {
-    setPendingUpload(files.value, formData.value.simulationRequirement)
+    if (simpleMode.value) {
+      if (!canSubmitSimple.value) return // nothing generated yet — avoid uploading an empty seed
+      // Create an in-memory .txt File from the generated seed content
+      const seedFile = new File(
+        [generatedSeed.value],
+        'seed.txt',
+        { type: 'text/plain' }
+      )
+      setPendingUpload([seedFile], generatedPrompt.value)
+    } else {
+      if (!canSubmit.value) return
+      setPendingUpload(files.value, formData.value.simulationRequirement)
+    }
     router.push({ name: 'Process', params: { projectId: 'new' } })
   })
 }