diff --git a/packages/bitbadgesjs-sdk/scripts/gen-skill-docs.ts b/packages/bitbadgesjs-sdk/scripts/gen-skill-docs.ts new file mode 100644 index 0000000000..6f1a3d9dac --- /dev/null +++ b/packages/bitbadgesjs-sdk/scripts/gen-skill-docs.ts @@ -0,0 +1,112 @@ +#!/usr/bin/env tsx +/** + * Generate docs pages from builder skill instructions. + * + * Source of truth: src/builder/resources/skillInstructions.ts + * Output: ../bitbadges-docs/x-tokenization/examples/skills/ + * + * Ported from the old bitbadges-builder-mcp/scripts/gen-skill-docs.ts + * when the MCP server was folded into the SDK. The only real change is + * the import path β€” SKILL_INSTRUCTIONS now lives under the SDK's builder + * subpath. Output shape + docs destination are unchanged so the existing + * SUMMARY.md entries and any review diffs stay stable. + * + * Usage: npx tsx scripts/gen-skill-docs.ts + * (optional) DOCS_OUTPUT_DIR=/path/to/override npx tsx scripts/gen-skill-docs.ts + */ + +import { SKILL_INSTRUCTIONS } from '../src/builder/resources/skillInstructions.js'; +import { writeFileSync, mkdirSync } from 'fs'; +import { dirname, join } from 'path'; +import { fileURLToPath } from 'url'; + +// Resolve __dirname in a way that works both when tsx runs this file as +// ESM (import.meta.url present) and when it transpiles to CJS (falls +// back to __dirname if already defined). The docs output path is +// resolved relative to this script's location so running from any cwd +// produces the same result. +const here = + typeof __dirname !== 'undefined' + ? 
__dirname + : dirname(fileURLToPath(import.meta.url)); + +const DOCS_DIR = + process.env.DOCS_OUTPUT_DIR || + join(here, '../../../../bitbadges-docs/x-tokenization/examples/skills'); + +mkdirSync(DOCS_DIR, { recursive: true }); + +const categoryLabels: Record<string, string> = { + 'token-type': 'Token Types', + standard: 'Standards', + approval: 'Approval Patterns', + feature: 'Features', + advanced: 'Advanced', +}; + +// Group skills by category for the README +const byCategory = new Map(); +for (const skill of SKILL_INSTRUCTIONS) { + const list = byCategory.get(skill.category) || []; + list.push(skill); + byCategory.set(skill.category, list); +} + +// Generate individual skill pages +for (const skill of SKILL_INSTRUCTIONS) { + const filename = `${skill.id}.md`; + const refs = skill.referenceCollectionIds?.length + ? `\n\n## Reference Collections\n\n${skill.referenceCollectionIds + .map((id) => `- [Collection ${id}](https://bitbadges.io/collections/${id})`) + .join('\n')}` + : ''; + + const content = `# ${skill.name} + +> ${skill.description} + +**Category:** ${categoryLabels[skill.category] || skill.category} + +## Summary + +${skill.summary} + +## Instructions + +${skill.instructions}${refs} +`; + + writeFileSync(join(DOCS_DIR, filename), content); +} + +// Generate README index +const categoryOrder = ['token-type', 'standard', 'approval', 'feature', 'advanced']; +let readme = `# πŸ€– Builder Skills + +These pages document every guided build skill available in the BitBadges Builder (shipped as part of [bitbadgesjs-sdk](https://github.com/BitBadges/bitbadgesjs)). Each skill provides step-by-step instructions for building a specific type of token or configuring a specific feature. + +> **Tip:** If you're using the BitBadges builder in Claude, Cursor, or another AI tool via the MCP server (\`npx bitbadgesjs-sdk-mcp\`) or the \`bitbadges-cli builder\` command surface, these instructions are loaded automatically when you select a skill. 
These pages are provided as a human-readable reference. + +`; + +for (const cat of categoryOrder) { + const skills = byCategory.get(cat); + if (!skills?.length) continue; + readme += `## ${categoryLabels[cat] || cat}\n\n`; + for (const skill of skills) { + readme += `- [${skill.name}](${skill.id}.md) β€” ${skill.description}\n`; + } + readme += '\n'; +} + +writeFileSync(join(DOCS_DIR, 'README.md'), readme); + +// Print SUMMARY.md lines for easy copy-paste +console.log('\n=== Add these lines to SUMMARY.md as a top-level section ===\n'); +console.log('## πŸ€– Builder Skills\n'); +console.log('* [Overview](x-tokenization/examples/skills/README.md)'); +for (const skill of SKILL_INSTRUCTIONS) { + console.log(` * [${skill.name}](x-tokenization/examples/skills/${skill.id}.md)`); +} + +console.log(`\nβœ… Generated ${SKILL_INSTRUCTIONS.length} skill pages + README in ${DOCS_DIR}`); diff --git a/packages/bitbadgesjs-sdk/src/builder/index.ts b/packages/bitbadgesjs-sdk/src/builder/index.ts index 7a5c29bfce..02a0aef4e4 100644 --- a/packages/bitbadgesjs-sdk/src/builder/index.ts +++ b/packages/bitbadgesjs-sdk/src/builder/index.ts @@ -1,11 +1,15 @@ #!/usr/bin/env node /** - * BitBadges Builder MCP Server + * BitBadges Builder β€” Model Context Protocol (MCP) stdio server bin. * - * Enable natural language collection creation via MCP. + * Entry point when users run `bitbadges-builder` or point Claude Desktop at + * this package as an MCP server. The actual tool/resource handlers live in + * `./tools/registry.ts` and `./resources/registry.ts` β€” this file is just + * the stdio transport + lifecycle wrapper. For in-process use, import the + * registry directly or call `bitbadges-cli builder …`. 
* - * Example usage: + * Example natural-language usage (via an MCP client): * > "Create a 1:1 backed USDC stablecoin with 100 USDC/day spend limit" * * Supported collection types: diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/examplesDocs.ts b/packages/bitbadgesjs-sdk/src/builder/resources/examplesDocs.ts index a5a9bd2f03..1441cdc162 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/examplesDocs.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/examplesDocs.ts @@ -130,7 +130,26 @@ Create a collection of 100 unique NFTs with a public mint approval. "cosmosCoinBackedPath": null }, "aliasPathsToAdd": [], - "cosmosCoinWrapperPathsToAdd": [] + "cosmosCoinWrapperPathsToAdd": [], + "_meta": { + "metadataPlaceholders": { + "ipfs://METADATA_COLLECTION": { + "name": "My NFT Collection", + "description": "A collection of 100 unique NFTs.", + "image": "ipfs://QmCollectionImage..." + }, + "ipfs://METADATA_TOKEN_{id}": { + "name": "My NFT Collection Token", + "description": "A unique NFT from the My NFT Collection.", + "image": "ipfs://QmTokenImage..." + }, + "ipfs://METADATA_APPROVAL_mint": { + "name": "Public Mint", + "description": "Allows anyone to mint one NFT for 5 BADGE.", + "image": "" + } + } + } } } ], @@ -138,23 +157,6 @@ Create a collection of 100 unique NFTs with a public mint approval. "fee": { "amount": [{ "denom": "ubadge", "amount": "5000" }], "gas": "500000" - }, - "metadataPlaceholders": { - "ipfs://METADATA_COLLECTION": { - "name": "My NFT Collection", - "description": "A collection of 100 unique NFTs.", - "image": "ipfs://QmCollectionImage..." - }, - "ipfs://METADATA_TOKEN_{id}": { - "name": "My NFT Collection Token", - "description": "A unique NFT from the My NFT Collection.", - "image": "ipfs://QmTokenImage..." 
- }, - "ipfs://METADATA_APPROVAL_mint": { - "name": "Public Mint", - "description": "Allows anyone to mint one NFT for 5 BADGE.", - "image": "" - } } } \`\`\``, @@ -253,7 +255,31 @@ Create a fungible token with unlimited supply. "cosmosCoinBackedPath": null }, "aliasPathsToAdd": [], - "cosmosCoinWrapperPathsToAdd": [] + "cosmosCoinWrapperPathsToAdd": [], + "_meta": { + "metadataPlaceholders": { + "ipfs://METADATA_COLLECTION": { + "name": "My Token", + "description": "A fungible token with unlimited supply.", + "image": "ipfs://QmTokenImage..." + }, + "ipfs://METADATA_TOKEN": { + "name": "MY", + "description": "The MY token unit.", + "image": "ipfs://QmTokenImage..." + }, + "ipfs://METADATA_APPROVAL_mint": { + "name": "Public Mint", + "description": "Allows anyone to mint tokens freely.", + "image": "" + }, + "ipfs://METADATA_APPROVAL_transfer": { + "name": "Free Transfer", + "description": "Allows free transfer between users.", + "image": "" + } + } + } } } ], @@ -261,28 +287,6 @@ Create a fungible token with unlimited supply. "fee": { "amount": [{ "denom": "ubadge", "amount": "5000" }], "gas": "500000" - }, - "metadataPlaceholders": { - "ipfs://METADATA_COLLECTION": { - "name": "My Token", - "description": "A fungible token with unlimited supply.", - "image": "ipfs://QmTokenImage..." - }, - "ipfs://METADATA_TOKEN": { - "name": "MY", - "description": "The MY token unit.", - "image": "ipfs://QmTokenImage..." 
- }, - "ipfs://METADATA_APPROVAL_mint": { - "name": "Public Mint", - "description": "Allows anyone to mint tokens freely.", - "image": "" - }, - "ipfs://METADATA_APPROVAL_transfer": { - "name": "Free Transfer", - "description": "Allows free transfer between users.", - "image": "" - } } } \`\`\``, diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/frontendDocs.ts b/packages/bitbadgesjs-sdk/src/builder/resources/frontendDocs.ts index 76d980ca5f..2079eea00a 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/frontendDocs.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/frontendDocs.ts @@ -28,7 +28,7 @@ const FRONTEND_DOCS_CONTENT = { ### Address Handling - Always validate addresses before use (0x or bb1 format) -- Use \`convert_address\` MCP tool or SDK's \`ethToCosmos\`/\`cosmosToEth\` for format conversion +- Use the \`convert_address\` builder tool or SDK's \`ethToCosmos\`/\`cosmosToEth\` for format conversion - Display resolved names when available ### Amount Display @@ -37,7 +37,7 @@ const FRONTEND_DOCS_CONTENT = { - Show both the human-readable amount and denomination ### Transaction Flow -- Build transaction JSON (via MCP tools or manually) +- Build transaction JSON (via builder tools or manually) - Present to user for review before signing - User signs with their wallet (MetaMask for EVM, Keplr for Cosmos) - Broadcast the signed transaction @@ -46,7 +46,7 @@ const FRONTEND_DOCS_CONTENT = { collectionPatterns: `## Working with Collections ### Fetching Collection Data -Use the BitBadges API (via SDK or MCP \`query_collection\` tool): +Use the BitBadges API (via SDK or the builder's \`query_collection\` tool): \`\`\`typescript import { BitBadgesAPI } from 'bitbadgesjs-sdk'; @@ -79,7 +79,7 @@ const protocolData = JSON.parse(collection.customData || '{}'); transactionPatterns: `## Transaction Patterns ### Building and Submitting -1. Build transaction JSON using MCP tools or construct manually +1. 
Build transaction JSON using builder tools or construct manually 2. Validate with \`validate_transaction\` tool 3. Present to user for review 4. User signs with their wallet diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/masterPrompt.ts b/packages/bitbadgesjs-sdk/src/builder/resources/masterPrompt.ts index a615cea8fe..dc02d33667 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/masterPrompt.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/masterPrompt.ts @@ -89,13 +89,13 @@ When the user wants to: - Update existing collection β†’ Use MsgUniversalUpdateCollection with actual collection ID - Create subscription β†’ Use MsgUniversalUpdateCollection with "Subscriptions" standard -## Build β†’ Audit β†’ Deploy Flow (MANDATORY) +## Build β†’ Review β†’ Deploy Flow (MANDATORY) After EVERY collection build, follow this pipeline: 1. **Build** β†’ Use per-field tools in parallel: set_standards, set_valid_token_ids, set_invariants, add_approval, set_permissions, set_default_balances, set_collection_metadata, set_token_metadata, add_alias_path, set_mint_escrow_coins -2. **Audit** β†’ audit_collection, validate_transaction -3. **Fix** β†’ Address findings, re-audit if needed -4. **Present** β†’ Show audit results to user with plain-language explanations +2. **Review** β†’ review_collection, validate_transaction +3. **Fix** β†’ Address findings, re-review if needed +4. **Present** β†’ Show review results to user with plain-language explanations 5. **Deploy** β†’ get_transaction to retrieve the final transaction, return for user review and submission **IMPORTANT: JSON Output Format** β€” When returning the final transaction JSON, do NOT just print it inline in the terminal. Terminal output often introduces formatting artifacts (line wrapping, ANSI codes, truncation) that break JSON parsing. 
Instead: @@ -136,7 +136,7 @@ The complete transaction is a JSON object with this EXACT structure: { "messages": [ - { "typeUrl": "/tokenization.MsgUniversalUpdateCollection", "value": {...} } + { "typeUrl": "/tokenization.MsgCreateCollection", "value": {...} } ], "memo": "Optional memo text", "fee": { @@ -149,7 +149,7 @@ The complete transaction is a JSON object with this EXACT structure: 1. All numbers as strings: "1" not 1, "0" not 0 2. UintRange format: { "start": "1", "end": "18446744073709551615" } 3. Max uint64: "18446744073709551615" (use for "forever" time ranges) -4. collectionId: "0": Creates new collection OR references just-created collection`, +4. New vs edit: use MsgCreateCollection to create a new collection, MsgUpdateCollection (with real collectionId) to edit one.`, messageTypes: `## Message Types @@ -157,13 +157,15 @@ The complete transaction is a JSON object with this EXACT structure: | typeUrl | Purpose | |---------|---------| -| /tokenization.MsgUniversalUpdateCollection | Create/update collections | +| /tokenization.MsgCreateCollection | Create a new collection. Keeps \`defaultBalances\` and \`invariants\`; no \`collectionId\` and no \`updateXxxTimeline\` flags. | +| /tokenization.MsgUpdateCollection | Edit an existing collection. Requires non-zero \`collectionId\` + \`updateXxxTimeline\` flags. Never set \`defaultBalances\` or \`invariants\` here. 
| | /tokenization.MsgCreateAddressLists | Create reusable address lists | | /tokenization.MsgUpdateUserApprovals | Update user-level approvals | +| /tokenization.MsgTransferTokens | Mint or transfer tokens (commonly paired after Create for initial distribution) | ### Message Selection -- **New collection**: MsgUniversalUpdateCollection with collectionId: "0" -- **Update collection**: MsgUniversalUpdateCollection with actual ID`, +- **New collection**: MsgCreateCollection (optionally followed by MsgTransferTokens for initial mint) +- **Edit existing**: MsgUpdateCollection with the real \`collectionId\` and the \`updateXxxTimeline\` flags for the fields you intend to change.`, msgUniversalUpdateCollection: `## MsgUniversalUpdateCollection - Complete Structure @@ -922,56 +924,76 @@ Before outputting JSON, verify: metadataPlaceholders: `## Metadata Placeholders System -The metadataPlaceholders system allows you to generate metadata for collection, token, or approval metadata that will be automatically uploaded to IPFS. +The metadataPlaceholders system allows you to generate metadata for collection, token, approval, and alias-path metadata that will be automatically uploaded to IPFS. + +The sidecar lives **inside the message**, at \`messages[i].value._meta.metadataPlaceholders\`. It is NOT a top-level sibling of \`messages\`. Every placeholder URI referenced by this message's body gets a matching entry in this map β€” no wrapper-level copy, no parallel shape. 
### Format \`\`\`json { - "messages": [...], - "metadataPlaceholders": { - "ipfs://METADATA_COLLECTION": { - "name": "My Collection Name", - "description": "A description of the collection.", - "image": "https://example.com/image.png" - }, - "ipfs://METADATA_TOKEN_1": { - "name": "Token Name", - "description": "Token description.", - "image": "https://example.com/token.png" - }, - "ipfs://METADATA_APPROVAL_public-mint": { - "name": "Public Mint Approval", - "description": "Allows anyone to mint one token by paying 5 BADGE.", - "image": "" - }, - "ipfs://METADATA_ALIAS_PATH": { - "name": "vATOM", - "description": "Wrapped ATOM token for liquidity pools.", - "image": "https://example.com/vatom.png" + "messages": [ + { + "typeUrl": "/tokenization.MsgUniversalUpdateCollection", + "value": { + "collectionMetadata": { "uri": "ipfs://METADATA_COLLECTION", "customData": "" }, + "tokenMetadata": [ + { "uri": "ipfs://METADATA_TOKEN_1", "customData": "", "tokenIds": [{ "start": "1", "end": "1" }] } + ], + "collectionApprovals": [ + { "approvalId": "public-mint", "uri": "ipfs://METADATA_APPROVAL_public-mint", "...": "..." } + ], + "aliasPathsToAdd": [ + { "denom": "uvatom", "metadata": { "uri": "ipfs://METADATA_ALIAS_uvatom", "customData": "" }, "...": "..." 
} + ], + "_meta": { + "metadataPlaceholders": { + "ipfs://METADATA_COLLECTION": { + "name": "My Collection Name", + "description": "A description of the collection.", + "image": "https://example.com/image.png" + }, + "ipfs://METADATA_TOKEN_1": { + "name": "Token Name", + "description": "Token description.", + "image": "https://example.com/token.png" + }, + "ipfs://METADATA_APPROVAL_public-mint": { + "name": "Public Mint Approval", + "description": "Allows anyone to mint one token by paying 5 BADGE.", + "image": "" + }, + "ipfs://METADATA_ALIAS_uvatom": { + "name": "vATOM", + "description": "Wrapped ATOM token for liquidity pools.", + "image": "https://example.com/vatom.png" + } + } + } + } } - } + ] } \`\`\` ### How It Works -1. In your messages, use placeholder URIs like \`ipfs://METADATA_COLLECTION\`, \`ipfs://METADATA_TOKEN_1\`, etc. -2. In the \`metadataPlaceholders\` object, provide the actual metadata for each placeholder URI -3. The system will automatically replace the placeholder URIs with the provided metadata and upload to IPFS +1. In your message body, use placeholder URIs like \`ipfs://METADATA_COLLECTION\`, \`ipfs://METADATA_TOKEN_1\`, etc. +2. In the same message's \`value._meta.metadataPlaceholders\` object, provide the actual metadata for each placeholder URI. +3. The system will automatically replace the placeholder URIs with the provided metadata and upload to IPFS. 4. 
This works for: - Collection metadata (collectionMetadata.uri) - Token metadata (tokenMetadata[].uri) - Approval metadata (collectionApprovals[].uri) - - Alias path metadata (aliasPathsToAdd[].metadata.uri) + - Alias path metadata (aliasPathsToAdd[].metadata.uri and denomUnits[].metadata.uri) ### Important Notes -- Approval metadata should have \`image: ""\` (empty string) as approvals don't have images -- Collection and token metadata require \`name\`, \`description\`, and \`image\` -- You can use any placeholder URI format (e.g., \`ipfs://METADATA_APPROVAL_public-mint\`) -- The placeholder URI in the message must exactly match the key in \`metadataPlaceholders\` -- Always include proper descriptions ending with periods`, +- The sidecar lives at \`messages[0].value._meta.metadataPlaceholders\` β€” per-message, NOT at the tx wrapper level. +- Approval metadata MUST have \`image: ""\` (empty string) β€” approvals don't have images. +- Collection, token, and alias-path metadata require \`name\`, \`description\`, and \`image\`. +- The placeholder URI in the message body must exactly match the key in \`_meta.metadataPlaceholders\`. +- Always include proper descriptions ending with periods.`, evmQueryChallenges: `## EVM Query Challenges (v25+) diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/recipes.ts b/packages/bitbadgesjs-sdk/src/builder/resources/recipes.ts index 9a71e7e906..49ff74d282 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/recipes.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/recipes.ts @@ -76,7 +76,7 @@ const api = new BitBadgesAPI({ apiUrl: 'https://api.bitbadges.io', apiKey: proce const balance = await api.getBalanceByAddress(collectionId, 'bb1address...'); console.log(balance.balances); // Balance[] -// Or via MCP: query_balance({ collectionId: "123", address: "bb1..." })` +// Or via the builder: query_balance({ collectionId: "123", address: "bb1..." 
})` }, { id: 'transfer-tokens', @@ -110,7 +110,7 @@ const transferMsg = { name: 'Verify Token Ownership (Gate Access)', description: 'Check if an address owns specific tokens β€” use for gating', tags: ['verify', 'gate', 'access', 'ownership', 'check'], - code: `// Via MCP tool: verify_ownership + code: `// Via builder tool: verify_ownership // Supports AND/OR/NOT logic for complex ownership checks // // verify_ownership({ @@ -135,7 +135,7 @@ const hasToken = balance.balances.some(b => tags: ['smart-token', 'ibc', 'usdc', 'wrapped', 'stablecoin', 'backing'], code: `// Smart Token Key Points: // -// 1. Use generate_backing_address MCP tool to get the deterministic backing address +// 1. Use generate_backing_address builder tool to get the deterministic backing address // for an IBC denom (e.g., USDC β†’ bb1backingaddr...) // // 2. Two approvals required (backing + unbacking). Transferable is common but optional: @@ -218,7 +218,7 @@ const bb1Address = ethToCosmos('0x1234...'); // BitBadges -> ETH const ethAddress = cosmosToEth('bb1...'); -// Or via MCP: convert_address({ address: "0x1234..." }) +// Or via the builder: convert_address({ address: "0x1234..." }) // IMPORTANT: This is byte-level conversion (same key, different encoding) // This is NOT public key derivation β€” both addresses share the same key pair` @@ -265,9 +265,9 @@ const withdrawMsg = { // Server-side verification: // 1. Client presents their address -// 2. Server verifies ownership via MCP or API +// 2. Server verifies ownership via the builder or API -// Via MCP: +// Via the builder: // verify_ownership({ // address: "bb1clientaddress...", // requirements: { diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/registry.ts b/packages/bitbadgesjs-sdk/src/builder/resources/registry.ts index b6a241b1cc..2bd3a20686 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/registry.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/registry.ts @@ -2,7 +2,7 @@ * Central resource registry. 
* * Mirrors the tool registry (src/tools/registry.ts). Single source of truth - * for every MCP resource so the stdio server and library consumers + * for every builder resource so the stdio server and library consumers * (bitbadges-cli, bitbadgeschaind) see the same surface. * * A resource is a static-ish document addressed by URI. Unlike tools, reads @@ -47,7 +47,7 @@ export interface ResourceEntry { } // Inline ResourceInfo for the skills resource β€” it was defined ad-hoc inside -// server.ts before. Keeping the same metadata the MCP server used to expose. +// server.ts before. Keeping the same metadata the stdio server used to expose. const skillsAllInfo: ResourceInfo = { uri: 'bitbadges://skills/all', name: 'Skill Instructions', diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/skillInstructions.ts b/packages/bitbadgesjs-sdk/src/builder/resources/skillInstructions.ts index 822a993215..94e758da71 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/skillInstructions.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/skillInstructions.ts @@ -36,8 +36,8 @@ export const SKILL_INSTRUCTIONS: SkillInstruction[] = [ - Unbacking fromListId uses "!Mint:backingAddress" syntax (excludes both Mint and backing address β€” meaning only regular holders can unback) - Backing address is deterministic β€” use generate_backing_address tool - Optional: Add "AI Agent Vault" to standards for AI Prompt tab (display-only) -- Alias path: symbol = base unit (e.g. "uvatom"), denomUnits = display units with decimals > 0 only, each denomUnit MUST have metadata with an image -- All alias path and denomUnit metadata MUST include an \`image\` field (token logo URL)`, +- Alias path: symbol = base unit (e.g. "uvatom"), denomUnits = display units with decimals > 0 only, each denomUnit MUST have PathMetadata with a placeholder uri +- PathMetadata ONLY has \`{ uri, customData }\` β€” do NOT include image/name/description fields. 
Set metadata.uri to a placeholder like \`"ipfs://METADATA_ALIAS_<denom>"\` and register the image/name/description in the metadataPlaceholders sidecar keyed by that URI`, instructions: `## Smart Token Configuration ### Mental Model: Three Phases @@ -195,9 +195,9 @@ MUST configure at least one alias path. Structure: "decimals": "6", "symbol": "vATOM", "isDefaultDisplay": true, - "metadata": { "uri": "ipfs://...", "customData": "" } + "metadata": { "uri": "ipfs://METADATA_ALIAS_uvatom_UNIT", "customData": "" } }], - "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" } + "metadata": { "uri": "ipfs://METADATA_ALIAS_uvatom", "customData": "" } }] } \`\`\` @@ -205,10 +205,9 @@ MUST configure at least one alias path. Structure: Rules: - symbol = base unit symbol (e.g., "uvatom") - denomUnits = display units with decimals > 0 ONLY (base decimals 0 is implicit) -- Each denomUnit MUST have metadata with an image field (often the same image as the base alias path) -- The alias path itself MUST have metadata with an image field β€” this is the token logo shown in the UI +- Each denomUnit and the path itself MUST have PathMetadata of the form \`{ uri, customData }\`. Use a placeholder URI like \`"ipfs://METADATA_ALIAS_<denom>"\` / \`"ipfs://METADATA_ALIAS_<denom>_UNIT"\`. - isDefaultDisplay: true for the primary display unit -- **CRITICAL**: All metadata objects (alias path, denomUnits, cosmosCoinWrapperPaths) MUST include an \`image\` field with a valid URL. Missing images will be auto-fixed to a default but you should always provide a descriptive image. +- **CRITICAL**: PathMetadata has EXACTLY two fields β€” \`uri\` and \`customData\`. Never add \`image\`, \`name\`, or \`description\` here. Register the name, description, and image for each placeholder URI in the \`metadataPlaceholders\` sidecar returned alongside the transaction β€” the metadata auto-apply flow uploads the off-chain JSON and substitutes the placeholder URIs with real IPFS URIs after deploy. 
### Cosmos Coin Wrapper (Optional) @@ -222,8 +221,8 @@ For wrapping native Cosmos SDK coins, use \`allowSpecialWrapping: true\` and \`c "sideA": { "amount": "1" }, "sideB": [{ "amount": "1", "tokenIds": [{ "start": "1", "end": "1" }], "ownershipTimes": [{ "start": "1", "end": "18446744073709551615" }] }] }, - "denomUnits": [{ "decimals": "6", "symbol": "ATOM", "isDefaultDisplay": true, "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" } }], - "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" }, + "denomUnits": [{ "decimals": "6", "symbol": "ATOM", "isDefaultDisplay": true, "metadata": { "uri": "ipfs://METADATA_WRAPPER_uatom_UNIT", "customData": "" } }], + "metadata": { "uri": "ipfs://METADATA_WRAPPER_uatom", "customData": "" }, "allowOverrideWithAnyValidToken": false }] } @@ -284,8 +283,8 @@ Require ownership of a 2FA token to withdraw. Add mustOwnTokens to the unbacking ### Metadata Guidance -- Collection metadata, token metadata, alias path metadata, and denomUnit metadata should all have descriptive names/descriptions and images -- Do NOT use lazy placeholder names like "Backing Approval" β€” write real user-facing descriptions explaining what each approval does +- Collection, token, alias path, and denomUnit metadata proto fields are ALL shaped as \`{ uri, customData }\`. Use placeholder URIs (\`ipfs://METADATA_COLLECTION\`, \`ipfs://METADATA_TOKEN_\`, \`ipfs://METADATA_ALIAS_\`, etc.) and register the real names, descriptions, and images in the \`metadataPlaceholders\` sidecar keyed by those URIs. The auto-apply flow uploads the off-chain JSON and substitutes the placeholder URIs with real uploaded URIs after deploy. +- Do NOT use lazy placeholder names like "Backing Approval" β€” write real user-facing descriptions inside the metadataPlaceholders entries explaining what each approval / collection / token does. 
### Key Rules Summary @@ -585,9 +584,9 @@ When enabling liquidity pools for a collection, follow these requirements: "decimals": "6", "symbol": "vATOM", "isDefaultDisplay": true, - "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" } + "metadata": { "uri": "ipfs://METADATA_ALIAS_uvatom_UNIT", "customData": "" } }], - "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" } + "metadata": { "uri": "ipfs://METADATA_ALIAS_uvatom", "customData": "" } }] } \`\`\` @@ -596,7 +595,7 @@ When enabling liquidity pools for a collection, follow these requirements: - disablePoolCreation MUST be false (not true) - MUST configure at least one alias path (required for liquidity pools) -- All alias path and denomUnit metadata MUST include an \`image\` field with a valid URL (token logo) +- PathMetadata (on every aliasPath and denomUnit) is ONLY \`{ uri, customData }\`. Put the token logo in the off-chain JSON at the placeholder URI and register a matching entry in \`metadataPlaceholders\` β€” never put \`image\` on the proto. - Merkle challenges are NOT compatible with liquidity pools - This enables decentralized exchange (DEX) trading interfaces` }, @@ -1188,7 +1187,7 @@ When creating a Custom-2FA collection, follow these requirements: - Manager can add (mint) and remove (burn) addresses - No peer-to-peer transfers - Requires TWO approvals: manager-add (minting) + manager-remove (burn to bb1qqq...s7gvmv) -- After building, proceed with audit_collection + validate_transaction as normal`, +- After building, proceed with review_collection + validate_transaction as normal`, instructions: `## Address List Token Type Use per-field tools to create this collection. It requires: @@ -1201,7 +1200,7 @@ Use per-field tools to create this collection. It requires: This creates a token collection where list membership = owning x1 of token ID 1. The manager can add (mint) and remove (burn) addresses. No peer-to-peer transfers. 
-After building, proceed with audit_collection and validate_transaction as normal.` +After building, proceed with review_collection and validate_transaction as normal.` }, { id: 'bb-402', @@ -1829,7 +1828,7 @@ MUST include an alias path so tokens display nicely: }, "symbol": "", "denomUnits": [], - "metadata": { "uri": "", "customData": "", "image": "https://example.com/token-logo.png" } + "metadata": { "uri": "ipfs://METADATA_ALIAS_u", "customData": "" } }] \`\`\` @@ -2245,6 +2244,7 @@ Without this, the escrow has no funds and claims will fail. - Binary prediction market: "Will X happen by Y?" Users deposit USDC to mint paired YES+NO tokens. Trade YES↔NO on a liquidity pool. Verifier settles by voting. Winner redeems 1:1. - Token ID 1 = YES, Token ID 2 = NO (via alias paths with 6 decimals) - mintEscrowAddress holds all deposited USDC +- invariants: \\\`noForcefulPostMintTransfers: true\\\` β€” locks non-mint approvals (redeem, settlement, transferable) from using \\\`overridesFromOutgoingApprovals\\\` or \\\`overridesToIncomingApprovals\\\`. Non-mint approvals rely on \\\`defaultBalances.autoApproveSelfInitiatedOutgoingTransfers: true\\\` for outgoing-side auth and on the burn destination for incoming-side auth - All permissions frozen after creation - 7 approvals: paired mint, freely transferable, pre-settlement redeem, yes-wins, no-wins, push-yes, push-no - Alias paths for YES (token 1) and NO (token 2) with 6 decimals @@ -2842,6 +2842,7 @@ For bounties that require the verifier or submitter to hold a token from THIS co - maxScalingMultiplier: MAX_UINT for unrestricted scaling - Deposit coinTransfer.to = "Mint" (auto-resolves to escrow) - requireToEqualsInitiatedBy: true on deposit-refund (contributor receives their own refund token) +- invariants: \\\`noForcefulPostMintTransfers: true\\\` β€” the refund approval (non-mint) MUST NOT set \\\`overridesFromOutgoingApprovals\\\` or \\\`overridesToIncomingApprovals\\\` (both must be false or omitted). 
It relies on \\\`defaultBalances.autoApproveSelfInitiatedOutgoingTransfers: true\\\` for the outgoing side and on the burn destination for the incoming side. The deposit-refund / deposit-progress / success approvals ARE Mint-side and keep \\\`overridesFromOutgoingApprovals: true\\\` as the chain requires, with \\\`overridesToIncomingApprovals: false\\\` - All permissions frozen after creation - DON'T use votingChallenges β€” goal tracking is via mustOwnTokens, not voting - DON'T forget allowAmountScaling on ALL 4 approvals @@ -3070,10 +3071,14 @@ On-chain crowdfunding with automatic goal tracking. Contributors deposit coins a - initiatedByListId on mint-to-winner = seller address (only seller can accept) - maxNumTransfers = 1 on all approvals (one-shot) - overridesToIncomingApprovals: false on mint-to-winner (bidder's incoming approval handles payment) +- Burn approval has NO override flags β€” relies on defaultBalances autoApproveSelfInitiatedOutgoingTransfers + burn destination +- noForcefulPostMintTransfers: true in invariants (permanently locks out forceful transfers post-mint) +- After settlement, the mint-to-winner approval is auto-deleted via afterOneUse β€” protocol validators treat a missing mint-to-winner as a valid post-settlement state, not an error - All permissions frozen after creation - DON'T use coinTransfers on collection approvals β€” payment happens via intent matching - DON'T set initiatedByListId to "All" on mint-to-winner β€” must be seller - DON'T set transferTimes to forever β€” must be bounded to accept window +- DON'T put override flags on the burn approval β€” auction has noForcefulPostMintTransfers: true, so non-mint override flags would be invariant violations - DO use autoDeletionOptions.afterOneUse: true on mint-to-winner`, instructions: `## Auction Configuration @@ -3086,7 +3091,8 @@ A single-item auction where the seller creates a collection, bidders place inten - Token ID 1 = The auctioned item - Standard: "Auction" - 
validTokenIds: [{ start: "1", end: "1" }] -- invariants: { noCustomOwnershipTimes: true } +- invariants: \\\`{ noCustomOwnershipTimes: true, maxSupplyPerId: "0", noForcefulPostMintTransfers: true, disablePoolCreation: true }\\\` +- \\\`noForcefulPostMintTransfers: true\\\` locks the collection so no non-mint approval can ever use \\\`overridesFromOutgoingApprovals\\\` or \\\`overridesToIncomingApprovals\\\` β€” the burn approval below relies on \\\`defaultBalances\\\` auto-approve flags instead - All permissions frozen after creation ### Bidding Mechanism @@ -3134,17 +3140,12 @@ Bids must have transferTimes that stay valid through the END of the accept windo "toListId": "", "initiatedByListId": "All", "tokenIds": [{ "start": "1", "end": "1" }], - "transferTimes": [{ "start": "1", "end": "18446744073709551615" }], - "approvalCriteria": { - "overridesFromOutgoingApprovals": true, - "overridesToIncomingApprovals": true, - "coinTransfers": [], - "maxNumTransfers": { "overallMaxNumTransfers": "1" }, - "autoDeletionOptions": { "afterOneUse": true, "afterOverallMaxNumTransfers": true } - } + "transferTimes": [{ "start": "1", "end": "18446744073709551615" }] } \\\`\\\`\\\` +> **No \\\`approvalCriteria\\\` overrides:** because the invariants lock \\\`noForcefulPostMintTransfers: true\\\`, the burn approval MUST NOT set \\\`overridesFromOutgoingApprovals\\\` or \\\`overridesToIncomingApprovals\\\`. It relies on \\\`defaultBalances.autoApproveSelfInitiatedOutgoingTransfers: true\\\` for the outgoing side and on the burn destination for the incoming side. + ### Auction Flow 1. **Create**: Seller creates auction collection with 2 approvals. No token is minted yet. @@ -3156,7 +3157,7 @@ Bids must have transferTimes that stay valid through the END of the accept windo 1. \\\`set_valid_token_ids\\\` β€” set [{ start: "1", end: "1" }] 2. \\\`set_standards\\\` β€” set ["Auction"] -3. \\\`set_invariants\\\` β€” set { noCustomOwnershipTimes: true } +3. 
\\\`set_invariants\\\` β€” set \\\`{ noCustomOwnershipTimes: true, maxSupplyPerId: "0", noForcefulPostMintTransfers: true, disablePoolCreation: true }\\\` 4. \\\`add_approval\\\` x2 β€” mint-to-winner, burn 5. \\\`set_collection_metadata\\\` β€” auction title, description, image 6. \\\`set_token_metadata\\\` β€” token 1 metadata (the item being auctioned) @@ -3170,6 +3171,8 @@ Bids must have transferTimes that stay valid through the END of the accept windo - DON'T set initiatedByListId to "All" on mint-to-winner β€” only the seller can accept bids - DON'T set transferTimes to forever β€” MUST be bounded to accept window (bidDeadline β†’ bidDeadline + acceptWindow) - DON'T set overridesToIncomingApprovals to true on mint-to-winner β€” must be false so the bidder's incoming approval (payment intent) is checked +- DON'T add override flags to the burn approval β€” the invariant \\\`noForcefulPostMintTransfers: true\\\` rejects \\\`overridesFromOutgoingApprovals\\\` or \\\`overridesToIncomingApprovals\\\` on any non-mint approval +- DON'T worry if the mint-to-winner approval is absent after settlement β€” \\\`autoDeletionOptions.afterOneUse: true\\\` removes it after the first successful mint, and the protocol validator accepts that as a valid post-settlement state - DON'T create a separate mint-to-seller approval β€” the token should not exist until the seller accepts a bid - DON'T forget autoDeletionOptions on mint-to-winner β€” without afterOneUse: true, the seller could mint to multiple bidders - DON'T forget that bids must have transferTimes valid through the END of the accept window, not just the bid deadline` @@ -3186,7 +3189,7 @@ Bids must have transferTimes that stay valid through the END of the accept windo - Each purchase approval: fromListId="Mint", toListId="All" (or burn address if burn-on-purchase), 1 coinTransfer paying the store address - Payment goes directly to store address (NOT to escrow) β€” overrideFromWithApproverAddress: false - Each product has 
independent price, supply limit (maxNumTransfers), and burn-on-purchase toggle -- predeterminedBalances.startBalances: 1x of that product's token ID +- predeterminedBalances.incrementedBalances.startBalances: 1x of that product's token ID - Optional burn approval: !Mint β†’ burn address, no coinTransfers - invariants: { noCustomOwnershipTimes: true } - All permissions frozen after creation diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/tokenRegistry.ts b/packages/bitbadgesjs-sdk/src/builder/resources/tokenRegistry.ts index ff3a79b415..3802c43ad9 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/tokenRegistry.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/tokenRegistry.ts @@ -64,7 +64,7 @@ export function formatTokenRegistryForDisplay(): string { output += '| Symbol | IBC Denom | Decimals | Backing Address |\n'; output += '|--------|-----------|----------|------------------|\n'; for (const token of content.tokens) { - output += `| ${token.symbol} | ${token.ibcDenom.slice(0, 20)}... | ${token.decimals} | ${token.backingAddress.slice(0, 15)}... |\n`; + output += `| ${token.symbol} | ${token.ibcDenom} | ${token.decimals} | ${token.backingAddress} |\n`; } output += '\n## Native Coins\n\n'; diff --git a/packages/bitbadgesjs-sdk/src/builder/resources/workflows.ts b/packages/bitbadgesjs-sdk/src/builder/resources/workflows.ts index 05f8b23657..1191758b0b 100644 --- a/packages/bitbadgesjs-sdk/src/builder/resources/workflows.ts +++ b/packages/bitbadgesjs-sdk/src/builder/resources/workflows.ts @@ -13,7 +13,7 @@ export const workflowsResourceInfo = { const WORKFLOWS_CONTENT = { overview: `# BitBadges Workflow Chains -Multi-step tool sequences for common operations. Each workflow lists the exact MCP tools to call in order.`, +Multi-step tool sequences for common operations. 
Each workflow lists the exact builder tools to call in order.`, bb402Acquire: `## BB-402: Acquire Tokens to Gain Access @@ -213,8 +213,8 @@ Step 1: Build the collection β†’ Use per-field tools: set_standards, set_valid_token_ids, set_invariants, add_approval, set_permissions, set_default_balances, set_collection_metadata, set_token_metadata, add_alias_path, set_mint_escrow_coins β†’ Use get_transaction to retrieve the built transaction -Step 2: Audit - β†’ audit_collection(collection: result.transaction, context: "nft art collection") +Step 2: Review + β†’ review_collection(collection: result.transaction, context: "nft art collection") β†’ Review all CRITICAL findings β€” these MUST be fixed β†’ Review all WARNING findings β€” decide if they're intentional @@ -223,8 +223,8 @@ Step 3: Fix critical issues β†’ Common fixes: add overridesFromOutgoingApprovals, add supply limits, lock permissions, set autoApproveAllIncomingTransfers -Step 4: Re-audit - β†’ audit_collection(collection: fixedTransaction) +Step 4: Re-review + β†’ review_collection(collection: fixedTransaction) β†’ Verify no more CRITICAL findings Step 5: Deploy @@ -232,10 +232,10 @@ Step 5: Deploy β†’ Return transaction JSON for user to sign with their wallet and broadcast \`\`\` -Can also audit existing on-chain collections: +Can also review existing on-chain collections: \`\`\` Step 1: β†’ query_collection(collectionId) -Step 2: β†’ audit_collection(collection: queryResult, context: "description") +Step 2: β†’ review_collection(collection: queryResult, context: "description") Step 3: β†’ Present findings to user \`\`\`` }; diff --git a/packages/bitbadgesjs-sdk/src/builder/sdk/apiClient.ts b/packages/bitbadgesjs-sdk/src/builder/sdk/apiClient.ts index 4eb59111f1..101f737073 100644 --- a/packages/bitbadgesjs-sdk/src/builder/sdk/apiClient.ts +++ b/packages/bitbadgesjs-sdk/src/builder/sdk/apiClient.ts @@ -16,10 +16,28 @@ export interface ApiResponse { } /** - * Get the API key from environment variable + * 
Get the API key. Resolution order: + * 1. `BITBADGES_API_KEY` environment variable (highest priority β€” overrides + * everything so CI / one-off shells can swap keys without touching disk). + * 2. `~/.bitbadges/config.json` `apiKey` field, populated via + * `bitbadges-cli config set apiKey `. Persistent default for + * day-to-day use. + * + * Imported lazily so this module stays usable in environments where the + * filesystem helper isn't available (e.g. browser bundles via the SDK). */ export function getApiKey(): string | undefined { - return process.env.BITBADGES_API_KEY; + const envKey = process.env.BITBADGES_API_KEY; + if (envKey) return envKey; + try { + // Lazy require to avoid pulling fs/os into non-Node consumers of the SDK. + // The CLI config helper is the source of truth for the on-disk format. + // eslint-disable-next-line @typescript-eslint/no-var-requires + const { getConfigApiKey } = require('../../cli/utils/config.js'); + return getConfigApiKey(); + } catch { + return undefined; + } } /** @@ -179,27 +197,36 @@ export async function getBalanceForToken( // Simulation APIs // ============================================ +/** + * Agent-JSON single-tx simulate request. Sent to the indexer's + * `/api/v0/simulate` Path 2. The indexer proto-encodes server-side + * via `encodeMsgsFromJson` and forwards to the LCD simulate endpoint. + * + * Previously this type wrapped a single tx in a `{txs: [...]}` "batch" + * envelope. The batch wrapper was never actually used as a batch + * (always exactly 1 element) and has been removed. + */ export interface SimulateRequest { - txs: Array<{ - context: { - address: string; - chain: string; - }; - messages: unknown[]; - memo?: string; - fee?: { - amount: Array<{ denom: string; amount: string }>; - gas: string; - }; - }>; + messages: unknown[]; + memo?: string; + fee?: { + amount: Array<{ denom: string; amount: string }>; + gas: string; + }; + /** bb1… address of the tx signer. 
Optional if every message already + * has the same `value.creator`. */ + creatorAddress?: string; } +/** + * Raw LCD simulate response β€” what the indexer forwards back from + * `/cosmos/tx/v1beta1/simulate`. Consumers typically read + * `gas_info.gas_used` + `result.events`. + */ export interface SimulateResponse { - results: Array<{ - gasUsed?: string; - events?: unknown[]; - error?: string; - }>; + gas_info?: { gas_used?: string; gas_wanted?: string }; + result?: { events?: unknown[] }; + error?: string; } export async function simulateTx( diff --git a/packages/bitbadgesjs-sdk/src/builder/server.ts b/packages/bitbadgesjs-sdk/src/builder/server.ts index f390526d57..60674ba8ae 100644 --- a/packages/bitbadgesjs-sdk/src/builder/server.ts +++ b/packages/bitbadgesjs-sdk/src/builder/server.ts @@ -1,9 +1,12 @@ /** - * BitBadges Builder MCP Server Configuration + * BitBadges Builder β€” Model Context Protocol (MCP) stdio transport. * - * Tool handlers live in `src/tools/registry.ts` β€” this file is just the MCP - * protocol shim. ListTools and CallTool both read from the same registry, so - * the CLI and any other library consumers see exactly the same tool surface. + * Wraps the builder tool/resource registry in the MCP protocol so Claude + * Desktop and other MCP clients can reach it over stdio. Tool handlers + * themselves live in `src/builder/tools/registry.ts` and are used unchanged + * by the in-process CLI path (`bitbadges-cli builder`) and by library + * consumers (indexer, chain binary delegation, etc.) β€” the MCP server is + * just one presentation layer among several. */ import { Server } from '@modelcontextprotocol/sdk/server/index.js'; @@ -18,12 +21,12 @@ import { toolRegistry, listTools, callTool } from './tools/registry.js'; import { listResources, readResource } from './resources/registry.js'; /** - * Create and configure the MCP server. + * Create and configure the BitBadges Builder MCP stdio server. 
*/ export function createServer(): Server { const server = new Server( { - name: 'bitbadges-builder-mcp', + name: 'bitbadges-builder', version: '1.0.0' }, { @@ -40,7 +43,8 @@ export function createServer(): Server { })); // CallTool β€” dispatch through the registry. The registry never throws, so - // we just wrap its output in the MCP content-block shape. + // we just wrap its output in the MCP content-block shape expected by the + // wire protocol. server.setRequestHandler(CallToolRequestSchema, async (request) => { const { name, arguments: args } = request.params; const { text, isError } = await callTool(name, args); diff --git a/packages/bitbadgesjs-sdk/src/builder/session/fileStore.ts b/packages/bitbadgesjs-sdk/src/builder/session/fileStore.ts new file mode 100644 index 0000000000..da70467b95 --- /dev/null +++ b/packages/bitbadgesjs-sdk/src/builder/session/fileStore.ts @@ -0,0 +1,93 @@ +/** + * File-backed session persistence. + * + * The in-memory session store (`sessionState.ts`) is ephemeral β€” it lives for + * the lifetime of one process. CLI consumers run many one-shot processes, one + * per tool call, and need session state to survive across them. These helpers + * bridge the two: snapshot the in-memory session to `~/.bitbadges/sessions/.json` + * after each call, restore it from disk before the next. + * + * All functions default to `DEFAULT_SESSIONS_DIR` but accept an override so + * agents can isolate sessions per workspace, tests can point at a tmpdir, and + * no caller has to hardcode the path. + * + * These helpers never throw for "no session found" β€” missing files are a + * normal condition (first call on a fresh id). They do throw on corrupt JSON + * and I/O failures, since those are real problems a caller should see. + */ + +import fs from 'fs'; +import os from 'os'; +import path from 'path'; + +import { exportSession, importSession } from './sessionState.js'; + +/** Default on-disk location for persisted session snapshots. 
*/ +export const DEFAULT_SESSIONS_DIR = path.join(os.homedir(), '.bitbadges', 'sessions'); + +/** Resolve the JSON file path for a session id under the given directory. */ +export function sessionFilePath(id: string, dir: string = DEFAULT_SESSIONS_DIR): string { + return path.join(dir, `${id}.json`); +} + +/** + * Load a persisted session snapshot from disk into the in-memory store. + * + * If no file exists at the expected path, this is a no-op β€” the caller will + * fall through to `getOrCreateSession` which initializes a blank template on + * first mutation. Throws on corrupt JSON or read failures so the caller can + * surface the error. + */ +export function loadSessionFromDisk(id: string, dir: string = DEFAULT_SESSIONS_DIR): void { + const file = sessionFilePath(id, dir); + if (!fs.existsSync(file)) return; + let snapshot: any; + try { + snapshot = JSON.parse(fs.readFileSync(file, 'utf-8')); + } catch (err) { + throw new Error(`Failed to load session "${id}" from ${file}: ${(err as Error).message}`); + } + importSession(id, snapshot); +} + +/** + * Snapshot the in-memory session for the given id and write it to disk. + * + * No-op if the session doesn't exist in memory β€” a tool call that never + * touched session state shouldn't create an empty file on disk. Creates the + * target directory if it doesn't exist yet. + */ +export function saveSessionToDisk(id: string, dir: string = DEFAULT_SESSIONS_DIR): void { + const snapshot = exportSession(id); + if (snapshot === null) return; + fs.mkdirSync(dir, { recursive: true }); + fs.writeFileSync(sessionFilePath(id, dir), JSON.stringify(snapshot, null, 2)); +} + +/** List persisted session ids under the given directory. Empty array if dir doesn't exist. 
*/ +export function listSessionFilesOnDisk(dir: string = DEFAULT_SESSIONS_DIR): string[] { + if (!fs.existsSync(dir)) return []; + return fs + .readdirSync(dir) + .filter((file) => file.endsWith('.json')) + .map((file) => file.replace(/\.json$/, '')); +} + +/** + * Read the raw JSON body of a persisted session snapshot, or null if not + * present. Returns the string as-is so callers can either pretty-print or + * re-parse. + */ +export function readSessionFileRaw(id: string, dir: string = DEFAULT_SESSIONS_DIR): string | null { + const file = sessionFilePath(id, dir); + if (!fs.existsSync(file)) return null; + return fs.readFileSync(file, 'utf-8'); +} + +/** Delete the persisted session file for the given id. No-op if absent. */ +export function resetSessionFile(id: string, dir: string = DEFAULT_SESSIONS_DIR): void { + const file = sessionFilePath(id, dir); + if (fs.existsSync(file)) { + fs.unlinkSync(file); + } +} diff --git a/packages/bitbadgesjs-sdk/src/builder/session/index.ts b/packages/bitbadgesjs-sdk/src/builder/session/index.ts index 2ae81d7ea1..24c65a1017 100644 --- a/packages/bitbadgesjs-sdk/src/builder/session/index.ts +++ b/packages/bitbadgesjs-sdk/src/builder/session/index.ts @@ -1 +1,2 @@ export * from './sessionState.js'; +export * from './fileStore.js'; diff --git a/packages/bitbadgesjs-sdk/src/builder/session/sessionState.ts b/packages/bitbadgesjs-sdk/src/builder/session/sessionState.ts index 969bbd4c19..0ff34c562e 100644 --- a/packages/bitbadgesjs-sdk/src/builder/session/sessionState.ts +++ b/packages/bitbadgesjs-sdk/src/builder/session/sessionState.ts @@ -1,9 +1,9 @@ /** - * In-memory session state for the MCP builder v2. + * In-memory session state for the builder v2. * * Uses a Map keyed by sessionId for per-request isolation. * Each session holds a blank MsgUniversalUpdateCollection template that per-field tools mutate. - * Auto-creates on first mutation. State is ephemeral β€” scoped to the MCP process lifetime. 
+ * Auto-creates on first mutation. State is ephemeral β€” scoped to the process lifetime. * * Design principles: * - Set tools replace the entire field @@ -33,15 +33,31 @@ export interface SessionTransaction { typeUrl: string; value: Record; }>; - metadataPlaceholders: Record; /** Approval IDs that existed when the session was initialized (for update flows) */ originalApprovalIds?: Set; } +/** + * Get or create the per-msg metadataPlaceholders sidecar on the first + * message's value. Canonical location: `session.messages[0].value._meta.metadataPlaceholders`. + * There is no session-level top-level sidecar β€” the sidecar belongs to + * the msg whose body references the placeholder URIs. + */ +export function getMsgPlaceholders(session: SessionTransaction): Record { + const value = session.messages[0].value; + if (!value._meta || typeof value._meta !== 'object') { + value._meta = {}; + } + if (!value._meta.metadataPlaceholders || typeof value._meta.metadataPlaceholders !== 'object') { + value._meta.metadataPlaceholders = {}; + } + return value._meta.metadataPlaceholders; +} + // Per-session state keyed by sessionId const sessions = new Map(); -// Default sessionId when none is provided (MCP-direct / single-user mode) +// Default sessionId when none is provided (builder-direct / single-user mode) const DEFAULT_SESSION_ID = '__default__'; function resolveSessionId(sessionId?: string): string { @@ -100,8 +116,7 @@ export function getOrCreateSession(sessionId?: string, creatorAddress?: string): aliasPathsToAdd: [], cosmosCoinWrapperPathsToAdd: [] } - }], - metadataPlaceholders: {} + }] }; sessions.set(sid, session); } @@ -141,7 +156,6 @@ export function exportSession(sessionId?: string): any | null { if (!session) return null; return { messages: session.messages, - metadataPlaceholders: session.metadataPlaceholders, originalApprovalIds: session.originalApprovalIds ? 
Array.from(session.originalApprovalIds) : undefined }; } @@ -159,7 +173,6 @@ export function importSession(sessionId: string | undefined, snapshot: any): voi } const restored: SessionTransaction = { messages: snapshot.messages, - metadataPlaceholders: snapshot.metadataPlaceholders || {}, originalApprovalIds: Array.isArray(snapshot.originalApprovalIds) ? new Set(snapshot.originalApprovalIds) : undefined @@ -248,7 +261,7 @@ export function setCollectionMetadata(sessionId: string | undefined, name: strin const uri = 'ipfs://METADATA_COLLECTION'; value.collectionMetadata = { uri, customData: '' }; value.updateCollectionMetadata = true; - s.metadataPlaceholders[uri] = { name, description, image: sanitizeImage(image) }; + getMsgPlaceholders(s)[uri] = { name, description, image: sanitizeImage(image) }; } export function setTokenMetadata( @@ -277,7 +290,7 @@ export function setTokenMetadata( value.tokenMetadata = [...(value.tokenMetadata || []), entry]; } value.updateTokenMetadata = true; - s.metadataPlaceholders[uriKey] = { name, description, image: sanitizeImage(image) }; + getMsgPlaceholders(s)[uriKey] = { name, description, image: sanitizeImage(image) }; } // ============================================================ @@ -306,9 +319,10 @@ export function addApproval(sessionId: string | undefined, approval: Record c.toUpperCase()); - s.metadataPlaceholders[uri] = { name: approvalTitle, description: '', image: '' }; + placeholders[uri] = { name: approvalTitle, description: '', image: '' }; } } @@ -327,7 +341,7 @@ export function removeApproval(sessionId: string | undefined, approvalId: string export function setApprovalMetadata(sessionId: string | undefined, approvalId: string, name: string, description: string, image: string = ''): void { const s = getOrCreateSession(sessionId); const uri = `ipfs://METADATA_APPROVAL_${approvalId}`; - s.metadataPlaceholders[uri] = { name, description, image }; + getMsgPlaceholders(s)[uri] = { name, description, image }; const value = 
s.messages[0].value; const approvals: any[] = value.collectionApprovals || []; diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/auditCollection.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/auditCollection.ts deleted file mode 100644 index caba0a440c..0000000000 --- a/packages/bitbadgesjs-sdk/src/builder/tools/builders/auditCollection.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * Tool: audit_collection - * Audits a MsgUniversalUpdateCollection for security risks, design flaws, and common gotchas. - * Returns categorized findings with severity levels. - * - * Logic delegated to bitbadgesjs-sdk's auditCollection(). - */ - -import { auditCollection, type AuditResult } from '../../../core/audit.js'; - -export const auditCollectionTool = { - name: 'audit_collection', - description: 'Audit a collection transaction or on-chain collection for security risks, design flaws, and common gotchas. Pass either a MsgUniversalUpdateCollection message or a raw collection object. Returns categorized findings with severity levels (critical/warning/info).', - inputSchema: { - type: 'object' as const, - properties: { - collection: { - type: 'object', - description: 'The collection to audit. Can be: (1) A MsgUniversalUpdateCollection message object with typeUrl and value, (2) The value field directly, or (3) A raw collection object from query_collection.' - }, - context: { - type: 'string', - description: 'Optional context about the intended use case (e.g., "NFT art collection", "stablecoin vault", "subscription token"). Helps tailor findings.' 
- } - }, - required: ['collection'] - } -}; - -export function handleAuditCollection(input: { collection: Record; context?: string }): AuditResult { - return auditCollection(input); -} diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/buildClaim.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/buildClaim.ts index 512b7ff611..6f958cd0ba 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/builders/buildClaim.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/builders/buildClaim.ts @@ -209,14 +209,17 @@ export function handleBuildClaim(input: BuildClaimInput): BuildClaimResult { claims: [claim] }; - // Build helpful nextSteps + // Build helpful nextSteps. The submit URL respects BITBADGES_API_URL so + // agents pointed at testnet / local indexers see the correct endpoint + // in the rendered instructions instead of a hardcoded mainnet URL. + const apiUrl = process.env.BITBADGES_API_URL || 'https://api.bitbadges.io'; const nextStepsParts: string[] = []; nextStepsParts.push( '## How to submit this claim\n' + 'Send a POST request to the BitBadges API:\n' + '```\n' + - 'POST https://api.bitbadges.io/api/v0/claims\n' + + `POST ${apiUrl}/api/v0/claims\n` + 'Authorization: Bearer \n' + 'Content-Type: application/json\n' + 'Body: \n' + diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/explainCollection.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/explainCollection.ts index a8f6255168..c7958b58c7 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/builders/explainCollection.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/builders/explainCollection.ts @@ -5,7 +5,7 @@ * No API key required β€” purely local analysis. * * Uses interpretCollection() from bitbadgesjs-sdk as the core explanation engine, - * with MCP-specific input normalization and developer/auditor extras on top. + * with builder-specific input normalization and developer/auditor extras on top. 
*/ import { interpretCollection } from '../../../core/interpret.js'; @@ -57,7 +57,7 @@ interface ApprovalData { } /** - * Unwraps the raw MCP input into a flat collection-like object. + * Unwraps the raw tool input into a flat collection-like object. * Handles build results, MsgUniversalUpdateCollection, on-chain query results, etc. */ function normalizeInput(input: Record): Record { diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/index.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/index.ts index 6a96c3dfe0..8cd1c849f0 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/builders/index.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/builders/index.ts @@ -2,7 +2,6 @@ * High-level builders index */ -export * from './auditCollection.js'; -export * from './verifyStandards.js'; export * from './explainCollection.js'; export * from './buildClaim.js'; +export * from './reviewCollection.js'; diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/reviewCollection.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/reviewCollection.ts new file mode 100644 index 0000000000..b8139442e3 --- /dev/null +++ b/packages/bitbadgesjs-sdk/src/builder/tools/builders/reviewCollection.ts @@ -0,0 +1,39 @@ +/** + * Tool: review_collection + * Unified deterministic review β€” combines audit + standards + UX checks + * into a single ReviewResult. This is the only review entry point + * exposed by the builder tool surface. + * + * Logic delegated to bitbadgesjs-sdk's reviewCollection(). + */ + +import { reviewCollection, type ReviewResult, type ReviewContext } from '../../../core/review.js'; + +export const reviewCollectionTool = { + name: 'review_collection', + description: + 'Run the unified deterministic review on a collection transaction or on-chain collection. Returns audit + standards + UX findings merged into one ReviewResult with a single verdict. 
Every finding has a stable `code`, `severity`, `source`, `category`, plus three required localized strings: `title`, `detail`, `recommendation` (each shaped { en, ... }). Accepts an optional context (onChainCollection, skipSources, hideAgentOnly).', + inputSchema: { + type: 'object' as const, + properties: { + collection: { + type: 'object', + description: + 'The collection to review. Accepts a MsgUniversalUpdateCollection message, its value field, a transaction { messages: [...] }, or a raw on-chain collection object.' + }, + context: { + type: 'object', + description: + 'Optional review context. Fields: onChainCollection (object, prior on-chain state used by diff checks and update-only suppressions), skipSources (array of "audit" | "standards" | "ux" to skip whole families), hideAgentOnly (boolean, drops findings tagged agentOnly β€” human consumers set this).' + } + }, + required: ['collection'] + } +}; + +export function handleReviewCollection(input: { + collection: Record; + context?: ReviewContext; +}): ReviewResult { + return reviewCollection(input.collection, input.context); +} diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/builders/verifyStandards.ts b/packages/bitbadgesjs-sdk/src/builder/tools/builders/verifyStandards.ts deleted file mode 100644 index bbf64aa84b..0000000000 --- a/packages/bitbadgesjs-sdk/src/builder/tools/builders/verifyStandards.ts +++ /dev/null @@ -1,16 +0,0 @@ -/** - * Deterministic skill/standard verification. - * - * Logic delegated to bitbadgesjs-sdk's verifyStandardsCompliance() and - * formatVerificationResult(). - * - * This file re-exports those functions so the rest of the MCP codebase - * (server.ts, builders/index.ts) can continue importing from this path. 
- */ - -export { - verifyStandardsCompliance, - formatVerificationResult, - type VerificationResult, - type StandardViolation -} from '../../../core/verify-standards.js'; diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/queries/simulateTransaction.ts b/packages/bitbadgesjs-sdk/src/builder/tools/queries/simulateTransaction.ts index 2539ddef1c..78e832cb04 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/queries/simulateTransaction.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/queries/simulateTransaction.ts @@ -64,6 +64,89 @@ function bigintToString(obj: unknown): unknown { return obj; } +/** + * Reusable simulate helper used by both the MCP tool wrapper above and the + * CLI's `builder simulate` / templates auto-simulate paths. Sends the + * agent-JSON single-tx shape (`{messages, memo, fee, creatorAddress}`) + * to the indexer's `/api/v0/simulate` endpoint and normalizes the raw + * LCD response into a `SimulateTransactionResult` so all callers render + * identically through the terminal helpers. + */ +export async function simulateMessages(params: { + messages: unknown[]; + memo?: string; + fee?: { + amount: Array<{ denom: string; amount: string }>; + gas: string; + }; + creatorAddress?: string; + /** Override the resolved API key (e.g. CLI `--network local` flow). */ + apiKey?: string; + /** Override the resolved API base URL (e.g. CLI `--url http://...`). 
*/ + apiUrl?: string; +}): Promise { + try { + if (!params.messages || !Array.isArray(params.messages) || params.messages.length === 0) { + return { success: false, error: 'Invalid transaction: empty or missing messages array' }; + } + + const response = await simulateTx( + { + messages: params.messages, + memo: params.memo || '', + fee: params.fee || { amount: [{ denom: 'ubadge', amount: '5000' }], gas: '500000' }, + creatorAddress: params.creatorAddress + }, + // Pass per-call override config through to apiRequest so the + // CLI's --network/--url flags can hit a local indexer without + // requiring environment variables. + params.apiKey || params.apiUrl ? { apiKey: params.apiKey, apiUrl: params.apiUrl } : undefined + ); + + if (!response.success) { + return { success: false, error: response.error }; + } + + // Path 2 response is the raw LCD simulate shape: + // { gas_info: { gas_used, gas_wanted }, result: { events: [...] } } + // On a chain-level rejection the indexer surfaces the message as + // response.data.error (via BitBadgesError passthrough) or as an + // error field on data itself. 
+ const data: any = response.data; + const chainError = data?.error || data?.message; + if (chainError && !data?.gas_info && !data?.result) { + return { success: true, valid: false, simulationError: chainError }; + } + + const gasUsed: string | undefined = data?.gas_info?.gas_used; + const events = ((data?.result?.events || []) as SimulationEvent[]); + let parsedEvents: unknown = undefined; + let netChanges: unknown = undefined; + try { + const parsed: ParsedSimulationEvents = parseSimulationEvents(events, []); + const net: NetBalanceChanges = calculateNetChanges(parsed); + parsedEvents = bigintToString(parsed); + netChanges = bigintToString(net); + } catch { + // If parsing fails, still return raw events β€” don't break the caller + } + + return { + success: true, + valid: true, + gasUsed, + events, + parsedEvents, + netChanges + }; + } catch (error) { + return { + success: false, + error: 'Failed to simulate transaction: ' + (error instanceof Error ? error.message : String(error)) + }; + } +} + export async function handleSimulateTransaction(input: SimulateTransactionInput): Promise { try { // Normalize: accept either a pre-parsed object or a JSON string @@ -90,71 +173,11 @@ export async function handleSimulateTransaction(input: SimulateTransactionInput) }; } - // Validate basic structure - if (!tx.messages || !Array.isArray(tx.messages)) { - return { - success: false, - error: 'Invalid transaction: Missing "messages" array' - }; - } - - // Create simulation request - const response = await simulateTx({ - txs: [{ - context: { - address: 'bb1simulation', - chain: 'eth' - }, - messages: tx.messages, - memo: tx.memo || '', - fee: tx.fee || { - amount: [{ denom: 'ubadge', amount: '5000' }], - gas: '500000' - } - }] + return simulateMessages({ + messages: tx.messages, + memo: tx.memo, + fee: tx.fee }); - - if (!response.success) { - return { - success: false, - error: response.error - }; - } - - const result = response.data?.results?.[0]; - - if (result?.error) { - 
return { - success: true, - valid: false, - simulationError: result.error - }; - } - - // Parse events into structured output - const events = (result?.events || []) as SimulationEvent[]; - let parsedEvents: unknown = undefined; - let netChanges: unknown = undefined; - - try { - const parsed: ParsedSimulationEvents = parseSimulationEvents(events, []); - const net: NetBalanceChanges = calculateNetChanges(parsed); - - // Convert bigint to string for JSON serialization - parsedEvents = bigintToString(parsed); - netChanges = bigintToString(net); - } catch { - // If parsing fails, still return raw events β€” don't break the tool - } - - return { - success: true, - valid: true, - gasUsed: result?.gasUsed, - events: result?.events, - parsedEvents, - netChanges - }; } catch (error) { return { success: false, diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/registry.spec.ts b/packages/bitbadgesjs-sdk/src/builder/tools/registry.spec.ts new file mode 100644 index 0000000000..8a953c000d --- /dev/null +++ b/packages/bitbadgesjs-sdk/src/builder/tools/registry.spec.ts @@ -0,0 +1,111 @@ +/** + * Tests for the centralized tool dispatch in `callTool` β€” specifically the + * pre-flight argument validator and the Zod-error formatter that round 4 + * added to catch LLM-agent footguns: + * + * - missing required field β†’ handler crashes with "Cannot read properties + * of undefined" instead of "Missing required field 'X'" + * - typo'd arg key β†’ handler silently treats as missing, agent thinks + * the call succeeded but state never got set + * - Zod errors β†’ raw JSON dump that's hard for both humans and LLMs to + * parse + * + * The validator is generic β€” it walks the tool's JSON Schema `inputSchema` + * and uses `required` + `additionalProperties: false`. Tests below exercise + * a representative sample of real registered tools rather than mocks, so + * any wiring drift surfaces here. 
+ */ + +import { callTool, toolRegistry } from './registry.js'; + +describe('callTool β€” pre-flight arg validation', () => { + describe('missing required field', () => { + it('generate_unique_id with no prefix β†’ friendly error, not "undefined_xxx"', async () => { + const res = await callTool('generate_unique_id', {}); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/Missing required field/); + expect(res.text).toMatch(/prefix/); + expect(res.result).toBeNull(); + }); + + it('generate_unique_id with empty-string prefix β†’ rejected (not "undefined_xxx")', async () => { + const res = await callTool('generate_unique_id', { prefix: '' }); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/Missing required field/); + }); + + it('query_balance with no args β†’ friendly error listing both required fields', async () => { + const res = await callTool('query_balance', {}); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/Missing required fields?/); + expect(res.text).toMatch(/collectionId/); + expect(res.text).toMatch(/address/); + }); + + it('query_balance with one of two required β†’ flags the missing one in the "Missing:" line', async () => { + const res = await callTool('query_balance', { collectionId: '42' }); + expect(res.isError).toBe(true); + // The error message has two parts: "Missing required field: address." + // and "Expected: collectionId, address" (full required list). Check + // that `address` shows up in the missing list. 
+ expect(res.text).toMatch(/Missing required field: address/); + }); + + it('set_valid_token_ids with sessionId only β†’ flags missing tokenIds', async () => { + const res = await callTool('set_valid_token_ids', { sessionId: 'spec-test' }); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/tokenIds/); + }); + + it('set_valid_token_ids with truly-wrong key β†’ catches via missing-required path', async () => { + // LLM agents typo arg names; the validator should refuse the call + // before the handler crashes / silently succeeds. + const res = await callTool('set_valid_token_ids', { + sessionId: 'spec-test', + badgeIds: [{ start: '1', end: '10' }] // wrong key + }); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/tokenIds/); + }); + }); + + describe('unknown tool', () => { + it('returns isError without crashing', async () => { + const res = await callTool('totally_fake_tool_xxx', { foo: 'bar' }); + expect(res.isError).toBe(true); + expect(res.text).toMatch(/Unknown tool/); + expect(res.result).toBeNull(); + }); + }); + + describe('happy path', () => { + it('get_current_timestamp with no args β†’ succeeds (no required fields)', async () => { + const res = await callTool('get_current_timestamp', {}); + expect(res.isError).toBeFalsy(); + expect(res.result).not.toBeNull(); + }); + + it('generate_unique_id with valid prefix β†’ succeeds', async () => { + const res = await callTool('generate_unique_id', { prefix: 'spec-test' }); + expect(res.isError).toBeFalsy(); + expect((res.result as any).success).toBe(true); + expect((res.result as any).ids[0]).toMatch(/^spec-test_[a-f0-9]+$/); + }); + }); + + describe('coverage sanity', () => { + it('all 51+ registered tools have an inputSchema', () => { + // If a tool slips into the registry without an inputSchema, the + // validator silently passes through and the handler is unguarded. + // Catch the regression here. 
+ const missing: string[] = []; + for (const [name, entry] of Object.entries(toolRegistry)) { + const schema = (entry as any).tool?.inputSchema; + if (!schema || typeof schema !== 'object') { + missing.push(name); + } + } + expect(missing).toEqual([]); + }); + }); +}); diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/registry.ts b/packages/bitbadgesjs-sdk/src/builder/tools/registry.ts index 853be3e894..d2bd780313 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/registry.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/registry.ts @@ -1,15 +1,15 @@ /** * Central tool registry. * - * Single source of truth for every MCP tool. Used by: - * - src/server.ts (MCP stdio server β€” wraps entries into ListTools/CallTool handlers) + * Single source of truth for every builder tool. Used by: + * - src/server.ts (Model Context Protocol (MCP) stdio transport β€” wraps entries into ListTools/CallTool handlers) * - external consumers (e.g. bitbadges-cli) that import this module and invoke - * tools as plain functions, without the MCP protocol. + * tools as plain functions, bypassing the MCP stdio transport. * * Each entry has a `tool` schema (for discovery) and a `run` function that takes * raw args and returns a structured result. An optional `formatText` controls how - * the result is serialized for the MCP text content block; by default we JSON - * stringify. The registry itself is protocol-agnostic β€” it never returns MCP + * the result is serialized for the text content block returned over MCP; by default we JSON + * stringify. The registry itself is protocol-agnostic β€” it never returns transport-shaped * content blocks directly. 
*/ @@ -44,13 +44,12 @@ import { // Dynamic store buildDynamicStoreTool, handleBuildDynamicStore, queryDynamicStoreTool, handleQueryDynamicStore, - // Audit / explain - auditCollectionTool, handleAuditCollection, + // Explain explainCollectionTool, handleExplainCollection, + // Unified review + reviewCollectionTool, handleReviewCollection, // Claim builder buildClaimTool, handleBuildClaim, - // Standards compliance - verifyStandardsCompliance, formatVerificationResult, // Session-based per-field tools (v2) setStandardsTool, handleSetStandards, setValidTokenIdsTool, handleSetValidTokenIds, @@ -86,7 +85,7 @@ export { exportSession, importSession } from '../session/sessionState.js'; // Re-export the resource registry so consumers get tools + resources from one // import. Resources are static documents (token registry, recipes, skill docs, -// error patterns, etc.) β€” the other half of the MCP surface. +// error patterns, etc.) β€” the other half of the builder surface. export { resourceRegistry, listResources, @@ -96,7 +95,7 @@ export { type ReadResourceResult } from '../resources/registry.js'; -/** MCP tool schema shape β€” kept loose to avoid coupling to a specific SDK version. */ +/** Builder tool schema shape β€” kept loose to avoid coupling to a specific SDK version. */ export interface ToolSchema { name: string; description: string; @@ -113,7 +112,7 @@ export interface ToolEntry { /** Invoke the tool. Receives raw args and returns a structured result. */ run: (args: any) => Promise | any; /** - * Optional custom text serializer for MCP content blocks. + * Optional custom text serializer for MCP content blocks when used through the stdio transport. * Defaults to `JSON.stringify(result, null, 2)`. 
*/ formatText?: (result: any) => string; @@ -142,21 +141,8 @@ const getSkillInstructionsTool: ToolSchema = { } }; -const verifyStandardsTool: ToolSchema = { - name: 'verify_standards', - description: - 'Verify that a collection transaction complies with BitBadges protocol standards (subscription, credit token, smart token, etc.). Returns violations with severity levels. Complements audit_collection which covers security β€” this covers standards compliance.', - inputSchema: { - type: 'object', - properties: { - transaction: { type: 'object', description: 'The transaction object to verify (MsgUniversalUpdateCollection or similar)' }, - transactionJson: { type: 'string', description: 'The transaction as a JSON string (alternative to transaction object)' } - } - } -}; - /** - * The tool registry. Keys are MCP tool names. + * The tool registry. Keys are builder tool names. */ export const toolRegistry: Record = { // Utilities @@ -208,8 +194,10 @@ export const toolRegistry: Record = { build_dynamic_store: entry(buildDynamicStoreTool, handleBuildDynamicStore), query_dynamic_store: entry(queryDynamicStoreTool, async (args: any) => await handleQueryDynamicStore(args)), - // Audit / explain / claim - audit_collection: entry(auditCollectionTool, handleAuditCollection), + // Unified review (preferred) + review_collection: entry(reviewCollectionTool, handleReviewCollection), + + // Explain / claim explain_collection: entry( explainCollectionTool, handleExplainCollection, @@ -217,21 +205,6 @@ export const toolRegistry: Record = { ), build_claim: entry(buildClaimTool, handleBuildClaim), - // Standards compliance (custom arg handling + formatter) - verify_standards: { - tool: verifyStandardsTool, - run: (args: any) => { - let tx = args; - if (typeof args?.transactionJson === 'string') { - tx = JSON.parse(args.transactionJson); - } else if (args?.transaction) { - tx = args.transaction; - } - return verifyStandardsCompliance(tx); - }, - formatText: (result: any) => 
formatVerificationResult(result) - }, - // Session-based per-field tools (v2) set_standards: entry(setStandardsTool, handleSetStandards), set_valid_token_ids: entry(setValidTokenIdsTool, handleSetValidTokenIds), @@ -272,6 +245,80 @@ export interface CallToolResult { isError?: boolean; } +/** + * Pre-flight check against `tool.inputSchema` (JSON-Schema-shaped). Catches + * the two LLM-agent footguns that handlers were silently tolerating: + * + * 1. **Missing required field** β€” handler reads `args.foo.toString()` and + * crashes with "Cannot read properties of undefined". An LLM agent + * gets a stack trace instead of "missing required field 'foo'". + * + * 2. **Wrong arg key** β€” agent passes `tokenIds` instead of + * `validTokenIds`. Without `additionalProperties: false`, the handler + * silently treats the field as missing and proceeds; state never + * gets set; the agent thinks the call succeeded. + * + * We check JSON Schema `required` always, and optionally `additionalProperties` + * when the tool's schema declares it `false`. Tools that DON'T set + * `additionalProperties: false` retain their existing tolerant behavior + * (some legitimately accept arbitrary kwargs). + */ +function preflightArgs(tool: any, args: any): { ok: true } | { ok: false; error: string } { + const schema = tool?.inputSchema; + if (!schema || typeof schema !== 'object') return { ok: true }; + const argsObj = args && typeof args === 'object' && !Array.isArray(args) ? args : {}; + + // Required fields + if (Array.isArray(schema.required) && schema.required.length > 0) { + const missing: string[] = []; + for (const key of schema.required) { + if (argsObj[key] === undefined || argsObj[key] === null || argsObj[key] === '') { + missing.push(key); + } + } + if (missing.length > 0) { + const expected = Array.isArray(schema.required) ? schema.required.join(', ') : ''; + return { + ok: false, + error: `Missing required field${missing.length > 1 ? 's' : ''}: ${missing.join(', ')}.${expected ? 
` Expected: ${expected}` : ''}` + }; + } + } + + // Unknown fields (only when explicitly closed) + if (schema.additionalProperties === false && schema.properties && typeof schema.properties === 'object') { + const allowed = new Set(Object.keys(schema.properties)); + const unknown = Object.keys(argsObj).filter((k) => !allowed.has(k)); + if (unknown.length > 0) { + const allowedList = [...allowed].join(', '); + return { + ok: false, + error: `Unknown field${unknown.length > 1 ? 's' : ''}: ${unknown.join(', ')}. Allowed: ${allowedList}.` + }; + } + } + return { ok: true }; +} + +/** + * Format a thrown error for tool consumers. Recognizes Zod issues and + * renders them as `path: message` lines instead of a giant JSON dump. + * Falls back to plain `error.message` for anything else. + */ +function formatToolError(err: unknown): string { + if (!err || typeof err !== 'object') return String(err); + const e = err as any; + // Zod errors have `issues: ZodIssue[]` with `path` + `message` per entry. + if (Array.isArray(e.issues) && e.issues.length > 0 && e.issues[0]?.message) { + const lines = e.issues.map((i: any) => { + const path = Array.isArray(i.path) && i.path.length > 0 ? i.path.join('.') : '(root)'; + return `${path}: ${i.message}`; + }); + return `Invalid input β€” ${e.issues.length} issue${e.issues.length > 1 ? 's' : ''}:\n ${lines.join('\n ')}`; + } + return e.message ? String(e.message) : String(err); +} + /** * Invoke a tool by name. Never throws β€” errors are captured into the result. */ @@ -280,13 +327,21 @@ export async function callTool(name: string, args: any): Promise if (!tool) { return { text: `Unknown tool: ${name}`, result: null, isError: true }; } + // Centralized pre-flight: catches missing-required and unknown-field + // mistakes BEFORE the handler runs. Without this, handlers dereferencing + // a missing field would crash with "Cannot read properties of undefined" + // and agents would get a stack trace instead of a structured error. 
+ const pre = preflightArgs(tool.tool, args); + if (!pre.ok) { + return { text: `Error: ${pre.error}`, result: null, isError: true }; + } try { const result = await tool.run(args); const text = tool.formatText ? tool.formatText(result) : JSON.stringify(result, null, 2); return { text, result }; } catch (error) { return { - text: `Error: ${error instanceof Error ? error.message : String(error)}`, + text: `Error: ${formatToolError(error)}`, result: null, isError: true }; diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/session/addAliasPath.ts b/packages/bitbadgesjs-sdk/src/builder/tools/session/addAliasPath.ts index 636106a86f..6c75ac642b 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/session/addAliasPath.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/session/addAliasPath.ts @@ -1,9 +1,21 @@ import { z } from 'zod'; -import { addAliasPath as addAliasPathToSession, getOrCreateSession } from '../../session/sessionState.js'; +import { addAliasPath as addAliasPathToSession, getOrCreateSession, getMsgPlaceholders } from '../../session/sessionState.js'; export const addAliasPathSchema = z.object({ sessionId: z.string().optional().describe("Session ID for per-request isolation."), creatorAddress: z.string().optional(), + // Off-chain metadata for the path-level placeholder URI. These are stored + // in the session's metadataPlaceholders sidecar (NOT on the proto) and + // referenced by metadata.uri. The metadata auto-apply flow uploads the + // sidecar entries as off-chain JSON and substitutes the placeholder URIs. + pathName: z.string().optional().describe('Display name for this alias path (off-chain metadata). Stored in metadataPlaceholders sidecar.'), + pathDescription: z.string().optional().describe('1-2 sentence description for this alias path (off-chain metadata).'), + pathImage: z.string().optional().describe('Image URL or IMAGE_N placeholder for this alias path (off-chain metadata).'), + // Optional per-denomUnit display content. 
Indexed by unit position. Same + // routing β€” sidecar only, not the proto. + denomUnitName: z.string().optional().describe('Display name for the default denom unit (off-chain).'), + denomUnitDescription: z.string().optional().describe('Description for the default denom unit (off-chain).'), + denomUnitImage: z.string().optional().describe('Image URL or IMAGE_N placeholder for the default denom unit (off-chain).'), aliasPath: z.object({ denom: z.string().describe('Base denom symbol (e.g., "uvatom", "uwusdc"). Must only contain a-zA-Z, _, {, }, -. NEVER use raw IBC denom (ibc/...).'), symbol: z.string().describe('Same as denom for the base unit.'), @@ -15,16 +27,18 @@ export const addAliasPathSchema = z.object({ decimals: z.string().describe('Decimal places as string. Must match IBC denom decimals (e.g., "6" for ATOM/USDC).'), symbol: z.string().describe('Display symbol (e.g., "vATOM", "wUSDC"). Do NOT reuse reserved symbols.'), isDefaultDisplay: z.boolean().optional(), + // PathMetadata only has { uri, customData }. Do NOT add an image + // field here β€” images live inside the off-chain JSON at metadata.uri + // and are handled by the metadata auto-apply flow. metadata: z.object({ uri: z.string().optional().default(''), - customData: z.string().optional().default(''), - image: z.string().describe('Token logo URL. REQUIRED.') + customData: z.string().optional().default('') }).optional() })).describe('Display units with decimals > 0 ONLY. Base decimals (0) is implicit.'), + // PathMetadata only has { uri, customData }. See note on denomUnits. metadata: z.object({ uri: z.string().optional().default(''), - customData: z.string().optional().default(''), - image: z.string().describe('Token logo URL. 
REQUIRED for alias paths.') + customData: z.string().optional().default('') }).optional() }) }); @@ -33,12 +47,18 @@ export type AddAliasPathInput = z.infer; export const addAliasPathTool = { name: 'add_alias_path', - description: 'Add an alias path for ICS20-backed tokens or liquidity pools. Required for smart tokens. Decimals must match the IBC denom decimals. All metadata MUST include an image field.', + description: 'Add an alias path for ICS20-backed tokens or liquidity pools. Required for smart tokens. Decimals must match the IBC denom decimals. PathMetadata on-chain only has { uri, customData }; pass pathName / pathDescription / pathImage (and denomUnitName / Description / Image) as TOP-LEVEL params and they will be routed into the off-chain metadataPlaceholders sidecar keyed by an auto-generated placeholder URI.', inputSchema: { type: 'object' as const, properties: { sessionId: { type: 'string', description: 'Session ID.' }, creatorAddress: { type: 'string' }, + pathName: { type: 'string', description: 'Display name for the alias path. Stored off-chain in metadataPlaceholders.' }, + pathDescription: { type: 'string', description: '1-2 sentence description for the alias path. Off-chain.' }, + pathImage: { type: 'string', description: 'Image URL or IMAGE_N placeholder for the alias path. Off-chain.' }, + denomUnitName: { type: 'string', description: 'Display name for the default denom unit. Off-chain.' }, + denomUnitDescription: { type: 'string', description: 'Description for the default denom unit. Off-chain.' }, + denomUnitImage: { type: 'string', description: 'Image URL or IMAGE_N placeholder for the default denom unit. Off-chain.' }, aliasPath: { type: 'object', description: 'Alias path configuration for ICS20-backed tokens.', @@ -82,10 +102,10 @@ export const addAliasPathTool = { isDefaultDisplay: { type: 'boolean', description: 'Whether this is the default display unit.' 
}, metadata: { type: 'object', + description: 'PathMetadata β€” ONLY { uri, customData }. No image/name/description at this level.', properties: { - uri: { type: 'string' }, - customData: { type: 'string' }, - image: { type: 'string', description: 'Token logo URL. REQUIRED.' } + uri: { type: 'string', description: 'Placeholder URI like "ipfs://METADATA_ALIAS__UNIT_". The metadata auto-apply flow substitutes real URIs after the off-chain JSON (with image/name/description) is uploaded.' }, + customData: { type: 'string' } } } }, @@ -94,11 +114,10 @@ export const addAliasPathTool = { }, metadata: { type: 'object', - description: 'Path-level metadata.', + description: 'Path-level PathMetadata β€” ONLY { uri, customData }. The image/name/description live inside the off-chain JSON at metadata.uri, not on this proto.', properties: { - uri: { type: 'string' }, - customData: { type: 'string' }, - image: { type: 'string', description: 'Token logo URL. REQUIRED for alias paths.' } + uri: { type: 'string', description: 'Placeholder URI like "ipfs://METADATA_ALIAS_". Substituted by the metadata auto-apply flow.' }, + customData: { type: 'string' } } } }, @@ -125,20 +144,87 @@ export function handleAddAliasPath(input: AddAliasPathInput) { return { success: false, error: `Symbol "${symbol}" contains invalid characters. Only a-zA-Z, _, {, }, - are allowed.` }; } - // Propagate path-level image to denomUnits that are missing metadata/image - const pathImage = input.aliasPath.metadata?.image || ''; + // PathMetadata only has { uri, customData }. An `image` field at this + // level is invalid proto. Strip any inbound `image` field and ensure + // every PathMetadata has a placeholder uri the metadata auto-apply flow + // can substitute. We then route any image/name/description the caller + // passed (either via the new top-level params or, defensively, via the + // legacy nested-on-metadata shape) into the session's metadataPlaceholders + // sidecar. 
Nothing about the image goes onto the on-chain proto. + let legacyPathImage: string | undefined; + let legacyPathName: string | undefined; + let legacyPathDescription: string | undefined; + if (input.aliasPath.metadata) { + const { image, name, description, ...cleanPathMetadata } = input.aliasPath.metadata as any; + legacyPathImage = typeof image === 'string' ? image : undefined; + legacyPathName = typeof name === 'string' ? name : undefined; + legacyPathDescription = typeof description === 'string' ? description : undefined; + input.aliasPath.metadata = { + uri: cleanPathMetadata.uri || `ipfs://METADATA_ALIAS_${denom}`, + customData: cleanPathMetadata.customData || '' + }; + } else { + input.aliasPath.metadata = { uri: `ipfs://METADATA_ALIAS_${denom}`, customData: '' }; + } + const pathUri = input.aliasPath.metadata!.uri as string; + + // Track the default denom unit URI so we can attach off-chain metadata to + // the right placeholder. + let defaultUnitUri: string | undefined; + let legacyUnitImage: string | undefined; + let legacyUnitName: string | undefined; + let legacyUnitDescription: string | undefined; if (input.aliasPath.denomUnits && Array.isArray(input.aliasPath.denomUnits)) { - input.aliasPath.denomUnits = input.aliasPath.denomUnits.map((unit: any) => { - if (!unit.metadata) { - unit.metadata = { uri: '', customData: '', image: pathImage }; - } else if (!unit.metadata.image) { - unit.metadata.image = pathImage; + input.aliasPath.denomUnits = input.aliasPath.denomUnits.map((unit: any, idx: number) => { + if (!unit.metadata || typeof unit.metadata !== 'object') { + unit.metadata = { uri: `ipfs://METADATA_ALIAS_${denom}_UNIT_${idx}`, customData: '' }; + } else { + const { image, name, description, ...cleanUnitMetadata } = unit.metadata as any; + if (unit.isDefaultDisplay || idx === 0) { + if (typeof image === 'string') legacyUnitImage = image; + if (typeof name === 'string') legacyUnitName = name; + if (typeof description === 'string') 
legacyUnitDescription = description; + } + unit.metadata = { + uri: cleanUnitMetadata.uri || `ipfs://METADATA_ALIAS_${denom}_UNIT_${idx}`, + customData: cleanUnitMetadata.customData || '' + }; + } + if ((unit.isDefaultDisplay || idx === 0) && !defaultUnitUri) { + defaultUnitUri = unit.metadata.uri; } return unit; }); } - getOrCreateSession(input.sessionId, input.creatorAddress); + // Persist the alias path itself into session state. + const session = getOrCreateSession(input.sessionId, input.creatorAddress); addAliasPathToSession(input.sessionId, input.aliasPath); + const placeholders = getMsgPlaceholders(session); + + // Route off-chain metadata into the per-msg metadataPlaceholders sidecar + // at `messages[0].value._meta.metadataPlaceholders`. Top-level params + // win over legacy nested-on-metadata fields if both are set. + const pathName = input.pathName ?? legacyPathName; + const pathDescription = input.pathDescription ?? legacyPathDescription; + const pathImage = input.pathImage ?? legacyPathImage; + if (pathName || pathDescription || pathImage) { + placeholders[pathUri] = { + name: pathName || placeholders[pathUri]?.name || `${denom} alias path`, + description: pathDescription || placeholders[pathUri]?.description || '', + image: pathImage || placeholders[pathUri]?.image || '' + }; + } + const unitName = input.denomUnitName ?? legacyUnitName; + const unitDescription = input.denomUnitDescription ?? legacyUnitDescription; + const unitImage = input.denomUnitImage ?? 
legacyUnitImage; + if (defaultUnitUri && (unitName || unitDescription || unitImage)) { + placeholders[defaultUnitUri] = { + name: unitName || placeholders[defaultUnitUri]?.name || `${denom} default unit`, + description: unitDescription || placeholders[defaultUnitUri]?.description || '', + image: unitImage || placeholders[defaultUnitUri]?.image || '' + }; + } + return { success: true, denom: input.aliasPath.denom }; } diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/session/addCosmosWrapperPath.ts b/packages/bitbadgesjs-sdk/src/builder/tools/session/addCosmosWrapperPath.ts index 40a7cda84d..1ef1311282 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/session/addCosmosWrapperPath.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/session/addCosmosWrapperPath.ts @@ -19,13 +19,23 @@ */ import { z } from 'zod'; -import { addCosmosWrapperPath as addCosmosWrapperPathToSession, getOrCreateSession } from '../../session/sessionState.js'; +import { addCosmosWrapperPath as addCosmosWrapperPathToSession, getOrCreateSession, getMsgPlaceholders } from '../../session/sessionState.js'; const VALID_CHARS = /^[a-zA-Z_{}-]+$/; export const addCosmosWrapperPathSchema = z.object({ sessionId: z.string().optional().describe("Session ID for per-request isolation."), creatorAddress: z.string().optional(), + // Off-chain metadata for the path-level placeholder URI. Stored in the + // session's metadataPlaceholders sidecar and referenced by metadata.uri. + // The metadata auto-apply flow uploads the sidecar entries as off-chain + // JSON and substitutes the placeholder URIs. 
+ pathName: z.string().optional().describe('Display name for this wrapper path (off-chain metadata).'), + pathDescription: z.string().optional().describe('1-2 sentence description for this wrapper path (off-chain metadata).'), + pathImage: z.string().optional().describe('Image URL or IMAGE_N placeholder for this wrapper path (off-chain metadata).'), + denomUnitName: z.string().optional().describe('Display name for the default denom unit (off-chain).'), + denomUnitDescription: z.string().optional().describe('Description for the default denom unit (off-chain).'), + denomUnitImage: z.string().optional().describe('Image URL or IMAGE_N placeholder for the default denom unit (off-chain).'), wrapperPath: z.object({ denom: z.string().describe('Custom denom for the wrapped ICS20 coin (e.g., "utoken", "uwrapped"). Must only contain a-zA-Z, _, {, }, -. This creates a NEW denom β€” do NOT use an existing IBC denom.'), symbol: z.string().describe('Symbol for the base unit. Usually same as denom.'), @@ -39,17 +49,19 @@ export const addCosmosWrapperPathSchema = z.object({ decimals: z.string().describe('Display decimals for this unit (e.g., "6"). Min 1, max 18.'), symbol: z.string().describe('Display symbol (e.g., "TOKEN"). Must only contain a-zA-Z, _, {, }, -.'), isDefaultDisplay: z.boolean().optional().describe('Whether this is the default display unit.'), + // PathMetadata only has { uri, customData }. Image/name/description + // live inside the off-chain JSON at metadata.uri and are handled by + // the metadata auto-apply flow. metadata: z.object({ uri: z.string().optional().default(''), - customData: z.string().optional().default(''), - image: z.string().describe('Token logo URL. REQUIRED.') + customData: z.string().optional().default('') }).optional() })).describe('Denomination units for display. 
At least one with isDefaultDisplay: true required.'), allowOverrideWithAnyValidToken: z.boolean().optional().default(false).describe('If true, users can choose any valid token ID to wrap. If false (default), must match exact tokenIds in conversion.'), + // PathMetadata only has { uri, customData }. See note on denomUnits. metadata: z.object({ uri: z.string().optional().default(''), - customData: z.string().optional().default(''), - image: z.string().optional().describe('Token logo URL. Recommended for display.') + customData: z.string().optional().default('') }).optional() }) }); @@ -58,12 +70,18 @@ export type AddCosmosWrapperPathInput = z.infer_UNIT_". Substituted by the metadata auto-apply flow.' }, + customData: { type: 'string' } } } }, @@ -120,11 +138,10 @@ export const addCosmosWrapperPathTool = { allowOverrideWithAnyValidToken: { type: 'boolean', description: 'If true, users can choose any valid token ID to wrap. Default false.' }, metadata: { type: 'object', - description: 'Path-level metadata.', + description: 'Path-level PathMetadata β€” ONLY { uri, customData }. Image/name/description live inside the off-chain JSON at metadata.uri.', properties: { - uri: { type: 'string' }, - customData: { type: 'string' }, - image: { type: 'string', description: 'Token logo URL. Recommended for display.' } + uri: { type: 'string', description: 'Placeholder URI like "ipfs://METADATA_WRAPPER_". Substituted by the metadata auto-apply flow.' }, + customData: { type: 'string' } } } }, @@ -150,21 +167,80 @@ export function handleAddCosmosWrapperPath(input: AddCosmosWrapperPathInput) { return { success: false, error: `Symbol "${symbol}" contains invalid characters. Only a-zA-Z, _, {, }, - are allowed.` }; } - // Propagate path-level image to denomUnits that are missing metadata/image - const pathImage = input.wrapperPath.metadata?.image || ''; + // PathMetadata only has { uri, customData }. 
Strip any inbound `image` + // (or other invalid fields) and ensure every PathMetadata has a + // placeholder uri the metadata auto-apply flow can substitute. Then + // route off-chain metadata into the session's metadataPlaceholders + // sidecar β€” NEVER onto the proto. + let legacyPathImage: string | undefined; + let legacyPathName: string | undefined; + let legacyPathDescription: string | undefined; + if (input.wrapperPath.metadata) { + const { image, name, description, ...cleanPathMetadata } = input.wrapperPath.metadata as any; + legacyPathImage = typeof image === 'string' ? image : undefined; + legacyPathName = typeof name === 'string' ? name : undefined; + legacyPathDescription = typeof description === 'string' ? description : undefined; + input.wrapperPath.metadata = { + uri: cleanPathMetadata.uri || `ipfs://METADATA_WRAPPER_${denom}`, + customData: cleanPathMetadata.customData || '' + }; + } else { + input.wrapperPath.metadata = { uri: `ipfs://METADATA_WRAPPER_${denom}`, customData: '' }; + } + const pathUri = input.wrapperPath.metadata!.uri as string; + + let defaultUnitUri: string | undefined; + let legacyUnitImage: string | undefined; + let legacyUnitName: string | undefined; + let legacyUnitDescription: string | undefined; if (input.wrapperPath.denomUnits && Array.isArray(input.wrapperPath.denomUnits)) { - input.wrapperPath.denomUnits = input.wrapperPath.denomUnits.map((unit: any) => { - if (!unit.metadata) { - unit.metadata = { uri: '', customData: '', image: pathImage }; - } else if (!unit.metadata.image) { - unit.metadata.image = pathImage; + input.wrapperPath.denomUnits = input.wrapperPath.denomUnits.map((unit: any, idx: number) => { + if (!unit.metadata || typeof unit.metadata !== 'object') { + unit.metadata = { uri: `ipfs://METADATA_WRAPPER_${denom}_UNIT_${idx}`, customData: '' }; + } else { + const { image, name, description, ...cleanUnitMetadata } = unit.metadata as any; + if (unit.isDefaultDisplay || idx === 0) { + if (typeof image === 
'string') legacyUnitImage = image; + if (typeof name === 'string') legacyUnitName = name; + if (typeof description === 'string') legacyUnitDescription = description; + } + unit.metadata = { + uri: cleanUnitMetadata.uri || `ipfs://METADATA_WRAPPER_${denom}_UNIT_${idx}`, + customData: cleanUnitMetadata.customData || '' + }; + } + if ((unit.isDefaultDisplay || idx === 0) && !defaultUnitUri) { + defaultUnitUri = unit.metadata.uri; } return unit; }); } - getOrCreateSession(input.sessionId, input.creatorAddress); + const session = getOrCreateSession(input.sessionId, input.creatorAddress); addCosmosWrapperPathToSession(input.sessionId, input.wrapperPath); + const placeholders = getMsgPlaceholders(session); + + const pathName = input.pathName ?? legacyPathName; + const pathDescription = input.pathDescription ?? legacyPathDescription; + const pathImage = input.pathImage ?? legacyPathImage; + if (pathName || pathDescription || pathImage) { + placeholders[pathUri] = { + name: pathName || placeholders[pathUri]?.name || `${denom} wrapper path`, + description: pathDescription || placeholders[pathUri]?.description || '', + image: pathImage || placeholders[pathUri]?.image || '' + }; + } + const unitName = input.denomUnitName ?? legacyUnitName; + const unitDescription = input.denomUnitDescription ?? legacyUnitDescription; + const unitImage = input.denomUnitImage ?? 
legacyUnitImage; + if (defaultUnitUri && (unitName || unitDescription || unitImage)) { + placeholders[defaultUnitUri] = { + name: unitName || placeholders[defaultUnitUri]?.name || `${denom} default unit`, + description: unitDescription || placeholders[defaultUnitUri]?.description || '', + image: unitImage || placeholders[defaultUnitUri]?.image || '' + }; + } + return { success: true, denom: input.wrapperPath.denom, diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/session/getTransaction.ts b/packages/bitbadgesjs-sdk/src/builder/tools/session/getTransaction.ts index 2824e201a5..4f9a720ba9 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/session/getTransaction.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/session/getTransaction.ts @@ -1,5 +1,6 @@ import { z } from 'zod'; import { getTransaction as getTransactionFromSession, getOrCreateSession, ensureStringNumbers } from '../../session/sessionState.js'; +import { normalizeTxMessages } from '../../../cli/utils/normalizeMsg.js'; export const getTransactionSchema = z.object({ sessionId: z.string().optional().describe("Session ID for per-request isolation."), @@ -50,5 +51,9 @@ export function handleGetTransaction(input: GetTransactionInput) { const sanitized = ensureStringNumbers(transaction); // Replace any unreplaced IMAGE_N placeholders with default logo const cleaned = replaceUnresolvedImagePlaceholders(sanitized); - return { success: true, transaction: cleaned }; + // Narrow Universal β†’ MsgCreateCollection / MsgUpdateCollection at this + // agent-facing boundary. Session storage stays on Universal (superset) + // so internal mutators don't have to branch on message type. 
+ const normalized = normalizeTxMessages(cleaned); + return { success: true, transaction: normalized }; } diff --git a/packages/bitbadgesjs-sdk/src/builder/tools/utilities/fetchDocs.ts b/packages/bitbadgesjs-sdk/src/builder/tools/utilities/fetchDocs.ts index 9d88be3af3..e39fc77bcc 100644 --- a/packages/bitbadgesjs-sdk/src/builder/tools/utilities/fetchDocs.ts +++ b/packages/bitbadgesjs-sdk/src/builder/tools/utilities/fetchDocs.ts @@ -91,8 +91,10 @@ const TOPIC_URL_MAP: Record = { 'ai agents': 'https://docs.bitbadges.io/for-developers/ai-agents', 'ai': 'https://docs.bitbadges.io/for-developers/ai-agents', 'bots': 'https://docs.bitbadges.io/for-developers/ai-agents', - 'mcp': 'https://docs.bitbadges.io/for-developers/ai-agents/mcp-builder-tools', - 'mcp tools': 'https://docs.bitbadges.io/for-developers/ai-agents/mcp-builder-tools', + 'builder': 'https://docs.bitbadges.io/for-developers/ai-agents/builder-tools', + 'builder tools': 'https://docs.bitbadges.io/for-developers/ai-agents/builder-tools', + 'mcp': 'https://docs.bitbadges.io/for-developers/ai-agents/builder-tools', + 'mcp tools': 'https://docs.bitbadges.io/for-developers/ai-agents/builder-tools', 'bot examples': 'https://docs.bitbadges.io/for-developers/ai-agents/bot-examples', 'websocket': 'https://docs.bitbadges.io/for-developers/ai-agents/websocket-events', 'websocket events': 'https://docs.bitbadges.io/for-developers/ai-agents/websocket-events', @@ -143,6 +145,49 @@ function findTopicUrl(topic: string): string | null { return null; } +/** + * Strip a block-level HTML tag (and its contents) from a string. + * + * Uses a tempered-greedy token so nested `<` characters inside the block are + * consumed, and a flexible closing tag pattern that matches `` with + * trailing whitespace. Runs in a loop until the output is stable so partial + * bypasses like `ipt>...` β€” where a single pass would + * leave `